summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRyan Dahl <ry@tinyclouds.org>2011-11-21 09:48:45 -0800
committerRyan Dahl <ry@tinyclouds.org>2011-11-21 10:50:52 -0800
commitb488be127a8cf1e59eb257db3f8eaf6efdb0f275 (patch)
tree83436f4f84b9651ea66c3a0d304050252916c149
parent05de01d707cd9a80f34da23445f507f5f2e2c277 (diff)
downloadandroid-node-v8-b488be127a8cf1e59eb257db3f8eaf6efdb0f275.tar.gz
android-node-v8-b488be127a8cf1e59eb257db3f8eaf6efdb0f275.tar.bz2
android-node-v8-b488be127a8cf1e59eb257db3f8eaf6efdb0f275.zip
Include NPM, update .pkg to install it.
.msi update coming soon.
-rw-r--r--LICENSE6
-rw-r--r--deps/npm/.gitignore15
-rw-r--r--deps/npm/.gitmodules51
-rw-r--r--deps/npm/.npmignore11
-rw-r--r--deps/npm/AUTHORS50
l---------deps/npm/CHANGES1
-rw-r--r--deps/npm/LICENSE61
-rw-r--r--deps/npm/Makefile125
-rw-r--r--deps/npm/README.md274
-rwxr-xr-xdeps/npm/bin/npm-cli.js77
-rw-r--r--deps/npm/bin/npm-g.cmd6
-rwxr-xr-xdeps/npm/bin/npm-get-uid-gid.js16
-rw-r--r--deps/npm/bin/npm.cmd6
-rw-r--r--deps/npm/bin/npm_g.cmd6
-rwxr-xr-xdeps/npm/bin/read-package-json.js22
-rwxr-xr-xdeps/npm/cli.js2
-rwxr-xr-xdeps/npm/configure33
l---------deps/npm/doc/api/author.md1
-rw-r--r--deps/npm/doc/api/bin.md13
-rw-r--r--deps/npm/doc/api/bugs.md19
-rw-r--r--deps/npm/doc/api/commands.md22
-rw-r--r--deps/npm/doc/api/config.md45
-rw-r--r--deps/npm/doc/api/deprecate.md32
-rw-r--r--deps/npm/doc/api/docs.md19
-rw-r--r--deps/npm/doc/api/edit.md24
-rw-r--r--deps/npm/doc/api/explore.md18
l---------deps/npm/doc/api/find.md1
l---------deps/npm/doc/api/get.md1
-rw-r--r--deps/npm/doc/api/help-search.md30
l---------deps/npm/doc/api/home.md1
-rw-r--r--deps/npm/doc/api/init.md29
-rw-r--r--deps/npm/doc/api/install.md19
-rw-r--r--deps/npm/doc/api/link.md33
l---------deps/npm/doc/api/list.md1
l---------deps/npm/doc/api/ln.md1
-rw-r--r--deps/npm/doc/api/load.md26
-rw-r--r--deps/npm/doc/api/ls.md50
-rw-r--r--deps/npm/doc/api/npm.md115
-rw-r--r--deps/npm/doc/api/outdated.md13
-rw-r--r--deps/npm/doc/api/owner.md31
-rw-r--r--deps/npm/doc/api/pack.md19
-rw-r--r--deps/npm/doc/api/prefix.md15
-rw-r--r--deps/npm/doc/api/prune.md17
-rw-r--r--deps/npm/doc/api/publish.md30
-rw-r--r--deps/npm/doc/api/rebuild.md16
-rw-r--r--deps/npm/doc/api/restart.md22
l---------deps/npm/doc/api/rm.md1
-rw-r--r--deps/npm/doc/api/root.md15
-rw-r--r--deps/npm/doc/api/run-script.md27
-rw-r--r--deps/npm/doc/api/search.md35
l---------deps/npm/doc/api/set.md1
-rw-r--r--deps/npm/doc/api/start.md13
-rw-r--r--deps/npm/doc/api/stop.md13
-rw-r--r--deps/npm/doc/api/submodule.md28
-rw-r--r--deps/npm/doc/api/tag.md23
-rw-r--r--deps/npm/doc/api/test.md16
-rw-r--r--deps/npm/doc/api/uninstall.md16
-rw-r--r--deps/npm/doc/api/unpublish.md20
-rw-r--r--deps/npm/doc/api/update.md11
-rw-r--r--deps/npm/doc/api/version.md18
-rw-r--r--deps/npm/doc/api/view.md93
-rw-r--r--deps/npm/doc/api/whoami.md15
-rw-r--r--deps/npm/doc/cli/adduser.md36
l---------deps/npm/doc/cli/author.md1
-rw-r--r--deps/npm/doc/cli/bin.md17
-rw-r--r--deps/npm/doc/cli/bugs.md38
-rw-r--r--deps/npm/doc/cli/build.md22
-rw-r--r--deps/npm/doc/cli/bundle.md14
-rw-r--r--deps/npm/doc/cli/cache.md70
-rw-r--r--deps/npm/doc/cli/changelog.md36
-rw-r--r--deps/npm/doc/cli/coding-style.md190
-rw-r--r--deps/npm/doc/cli/completion.md29
-rw-r--r--deps/npm/doc/cli/config.md665
-rw-r--r--deps/npm/doc/cli/deprecate.md24
-rw-r--r--deps/npm/doc/cli/developers.md172
-rw-r--r--deps/npm/doc/cli/docs.md38
-rw-r--r--deps/npm/doc/cli/edit.md35
-rw-r--r--deps/npm/doc/cli/explore.md40
-rw-r--r--deps/npm/doc/cli/faq.md223
l---------deps/npm/doc/cli/find.md1
-rw-r--r--deps/npm/doc/cli/folders.md209
l---------deps/npm/doc/cli/get.md1
l---------deps/npm/doc/cli/global.md1
-rw-r--r--deps/npm/doc/cli/help-search.md35
-rw-r--r--deps/npm/doc/cli/help.md38
l---------deps/npm/doc/cli/home.md1
-rw-r--r--deps/npm/doc/cli/init.md24
-rw-r--r--deps/npm/doc/cli/install.md201
-rw-r--r--deps/npm/doc/cli/json.md472
-rw-r--r--deps/npm/doc/cli/link.md57
-rw-r--r--deps/npm/doc/cli/list.md55
l---------deps/npm/doc/cli/ln.md1
l---------deps/npm/doc/cli/ls.md1
-rw-r--r--deps/npm/doc/cli/npm.md155
-rw-r--r--deps/npm/doc/cli/outdated.md17
-rw-r--r--deps/npm/doc/cli/owner.md32
-rw-r--r--deps/npm/doc/cli/pack.md25
-rw-r--r--deps/npm/doc/cli/prefix.md17
-rw-r--r--deps/npm/doc/cli/prune.md21
-rw-r--r--deps/npm/doc/cli/publish.md30
-rw-r--r--deps/npm/doc/cli/rebuild.md20
-rw-r--r--deps/npm/doc/cli/registry.md92
-rw-r--r--deps/npm/doc/cli/removing-npm.md54
-rw-r--r--deps/npm/doc/cli/restart.md22
l---------deps/npm/doc/cli/rm.md1
-rw-r--r--deps/npm/doc/cli/root.md17
-rw-r--r--deps/npm/doc/cli/run-script.md21
-rw-r--r--deps/npm/doc/cli/scripts.md182
-rw-r--r--deps/npm/doc/cli/search.md39
-rw-r--r--deps/npm/doc/cli/semver.md130
l---------deps/npm/doc/cli/set.md1
-rw-r--r--deps/npm/doc/cli/star.md22
-rw-r--r--deps/npm/doc/cli/start.md18
-rw-r--r--deps/npm/doc/cli/stop.md18
-rw-r--r--deps/npm/doc/cli/submodule.md28
-rw-r--r--deps/npm/doc/cli/tag.md17
-rw-r--r--deps/npm/doc/cli/test.md21
-rw-r--r--deps/npm/doc/cli/uninstall.md19
-rw-r--r--deps/npm/doc/cli/unpublish.md32
-rw-r--r--deps/npm/doc/cli/update.md21
-rw-r--r--deps/npm/doc/cli/version.md27
-rw-r--r--deps/npm/doc/cli/view.md85
-rw-r--r--deps/npm/doc/cli/whoami.md15
-rwxr-xr-xdeps/npm/html/api/GubbleBum-Blocky.ttfbin0 -> 10292 bytes
-rw-r--r--deps/npm/html/api/style.css336
-rwxr-xr-xdeps/npm/html/doc/GubbleBum-Blocky.ttfbin0 -> 10292 bytes
-rw-r--r--deps/npm/html/doc/style.css336
-rw-r--r--deps/npm/html/docfoot.html34
-rw-r--r--deps/npm/html/dochead.html8
-rw-r--r--deps/npm/html/favicon.icobin0 -> 7094 bytes
-rw-r--r--deps/npm/html/index.html86
-rw-r--r--deps/npm/html/n-64.pngbin0 -> 679 bytes
-rw-r--r--deps/npm/html/n-large.pngbin0 -> 699 bytes
-rw-r--r--deps/npm/html/npm-16.pngbin0 -> 145 bytes
-rw-r--r--deps/npm/html/npm-256-square.pngbin0 -> 3290 bytes
-rw-r--r--deps/npm/html/npm-256w.pngbin0 -> 3169 bytes
-rw-r--r--deps/npm/html/npm-64-square.pngbin0 -> 2908 bytes
-rw-r--r--deps/npm/html/npm-fin.pngbin0 -> 47789 bytes
-rw-r--r--deps/npm/html/npm-large.pngbin0 -> 483909 bytes
-rw-r--r--deps/npm/html/npm.pngbin0 -> 1164 bytes
-rw-r--r--deps/npm/lib/adduser.js50
-rw-r--r--deps/npm/lib/bin.js19
-rw-r--r--deps/npm/lib/bugs.js50
-rw-r--r--deps/npm/lib/build.js186
-rw-r--r--deps/npm/lib/cache.js735
-rw-r--r--deps/npm/lib/completion.js245
-rw-r--r--deps/npm/lib/config.js286
-rw-r--r--deps/npm/lib/deprecate.js48
-rw-r--r--deps/npm/lib/docs.js46
-rw-r--r--deps/npm/lib/edit.js31
-rw-r--r--deps/npm/lib/explore.js34
-rw-r--r--deps/npm/lib/faq.js8
-rw-r--r--deps/npm/lib/get.js12
-rw-r--r--deps/npm/lib/help-search.js183
-rw-r--r--deps/npm/lib/help.js150
-rw-r--r--deps/npm/lib/init.js240
-rw-r--r--deps/npm/lib/install.js608
-rw-r--r--deps/npm/lib/link.js158
-rw-r--r--deps/npm/lib/ls.js169
-rw-r--r--deps/npm/lib/npm.js448
-rw-r--r--deps/npm/lib/outdated.js146
-rw-r--r--deps/npm/lib/owner.js188
-rw-r--r--deps/npm/lib/pack.js66
-rw-r--r--deps/npm/lib/prefix.js10
-rw-r--r--deps/npm/lib/prune.js40
-rw-r--r--deps/npm/lib/publish.js166
-rw-r--r--deps/npm/lib/rebuild.js68
-rw-r--r--deps/npm/lib/restart.js1
-rw-r--r--deps/npm/lib/root.js11
-rw-r--r--deps/npm/lib/run-script.js100
-rw-r--r--deps/npm/lib/search.js222
-rw-r--r--deps/npm/lib/set.js12
-rw-r--r--deps/npm/lib/star.js34
-rw-r--r--deps/npm/lib/start.js1
-rw-r--r--deps/npm/lib/stop.js1
-rw-r--r--deps/npm/lib/submodule.js93
-rw-r--r--deps/npm/lib/tag.js17
-rw-r--r--deps/npm/lib/test.js1
-rw-r--r--deps/npm/lib/unbuild.js85
-rw-r--r--deps/npm/lib/uninstall.js55
-rw-r--r--deps/npm/lib/unpublish.js69
-rw-r--r--deps/npm/lib/update.js39
-rw-r--r--deps/npm/lib/utils/cmd-shim.js98
-rwxr-xr-xdeps/npm/lib/utils/completion.sh43
-rw-r--r--deps/npm/lib/utils/completion/file-completion.js29
-rw-r--r--deps/npm/lib/utils/completion/installed-deep.js46
-rw-r--r--deps/npm/lib/utils/completion/installed-shallow.js79
-rw-r--r--deps/npm/lib/utils/completion/remote-packages.js57
-rw-r--r--deps/npm/lib/utils/completion/users.js22
-rw-r--r--deps/npm/lib/utils/config-defs.js299
-rw-r--r--deps/npm/lib/utils/error-handler.js264
-rw-r--r--deps/npm/lib/utils/excludes.js145
-rw-r--r--deps/npm/lib/utils/exec.js106
-rw-r--r--deps/npm/lib/utils/fetch.js62
-rw-r--r--deps/npm/lib/utils/find-prefix.js50
-rw-r--r--deps/npm/lib/utils/find.js53
-rw-r--r--deps/npm/lib/utils/get-agent.js62
-rw-r--r--deps/npm/lib/utils/get.js6
-rw-r--r--deps/npm/lib/utils/ini.js333
-rw-r--r--deps/npm/lib/utils/lifecycle.js278
-rw-r--r--deps/npm/lib/utils/link.js30
-rw-r--r--deps/npm/lib/utils/load-package-defaults.js75
-rw-r--r--deps/npm/lib/utils/log.js185
-rw-r--r--deps/npm/lib/utils/mkdir-p.js191
-rw-r--r--deps/npm/lib/utils/npm-registry-client/adduser.js100
-rw-r--r--deps/npm/lib/utils/npm-registry-client/get.js171
-rw-r--r--deps/npm/lib/utils/npm-registry-client/index.js16
-rw-r--r--deps/npm/lib/utils/npm-registry-client/publish.js163
-rw-r--r--deps/npm/lib/utils/npm-registry-client/request.js242
-rw-r--r--deps/npm/lib/utils/npm-registry-client/star.js32
-rw-r--r--deps/npm/lib/utils/npm-registry-client/tag.js8
-rw-r--r--deps/npm/lib/utils/npm-registry-client/unpublish.js98
-rw-r--r--deps/npm/lib/utils/output.js156
-rw-r--r--deps/npm/lib/utils/promise-chain.js39
-rw-r--r--deps/npm/lib/utils/prompt.js69
-rw-r--r--deps/npm/lib/utils/read-installed.js295
-rw-r--r--deps/npm/lib/utils/read-json.js496
-rw-r--r--deps/npm/lib/utils/relativize.js70
-rw-r--r--deps/npm/lib/utils/set.js25
-rw-r--r--deps/npm/lib/utils/sha.js51
-rw-r--r--deps/npm/lib/utils/tar.js551
-rw-r--r--deps/npm/lib/utils/uid-number.js55
-rw-r--r--deps/npm/lib/version.js66
-rw-r--r--deps/npm/lib/view.js244
-rw-r--r--deps/npm/lib/whoami.js13
-rw-r--r--deps/npm/lib/xmas.js54
-rw-r--r--deps/npm/node_modules/abbrev/README.md23
-rw-r--r--deps/npm/node_modules/abbrev/lib/abbrev.js106
-rw-r--r--deps/npm/node_modules/abbrev/package.json8
-rw-r--r--deps/npm/node_modules/block-stream/README.md14
-rw-r--r--deps/npm/node_modules/block-stream/bench/block-stream.js68
-rw-r--r--deps/npm/node_modules/block-stream/bench/dropper-pause.js70
-rw-r--r--deps/npm/node_modules/block-stream/bench/dropper.js68
-rw-r--r--deps/npm/node_modules/block-stream/block-stream.js209
-rw-r--r--deps/npm/node_modules/block-stream/package.json23
-rw-r--r--deps/npm/node_modules/block-stream/test/basic.js27
-rw-r--r--deps/npm/node_modules/block-stream/test/nopad-thorough.js68
-rw-r--r--deps/npm/node_modules/block-stream/test/nopad.js57
-rw-r--r--deps/npm/node_modules/block-stream/test/pause-resume.js73
-rw-r--r--deps/npm/node_modules/block-stream/test/thorough.js68
-rw-r--r--deps/npm/node_modules/fstream/.gitignore3
-rw-r--r--deps/npm/node_modules/fstream/README.md76
-rw-r--r--deps/npm/node_modules/fstream/examples/pipe.js113
-rw-r--r--deps/npm/node_modules/fstream/examples/reader.js29
-rw-r--r--deps/npm/node_modules/fstream/examples/symlink-write.js9
-rw-r--r--deps/npm/node_modules/fstream/fstream.js31
-rw-r--r--deps/npm/node_modules/fstream/lib/abstract.js82
-rw-r--r--deps/npm/node_modules/fstream/lib/collect.js67
-rw-r--r--deps/npm/node_modules/fstream/lib/dir-reader.js180
-rw-r--r--deps/npm/node_modules/fstream/lib/dir-writer.js165
-rw-r--r--deps/npm/node_modules/fstream/lib/file-reader.js147
-rw-r--r--deps/npm/node_modules/fstream/lib/file-writer.js95
-rw-r--r--deps/npm/node_modules/fstream/lib/get-type.js32
-rw-r--r--deps/npm/node_modules/fstream/lib/link-reader.js54
-rw-r--r--deps/npm/node_modules/fstream/lib/link-writer.js82
-rw-r--r--deps/npm/node_modules/fstream/lib/proxy-reader.js87
-rw-r--r--deps/npm/node_modules/fstream/lib/proxy-writer.js109
-rw-r--r--deps/npm/node_modules/fstream/lib/reader.js234
-rw-r--r--deps/npm/node_modules/fstream/lib/writer.js324
-rw-r--r--deps/npm/node_modules/fstream/package.json21
-rw-r--r--deps/npm/node_modules/graceful-fs/LICENSE23
-rw-r--r--deps/npm/node_modules/graceful-fs/README.md5
-rw-r--r--deps/npm/node_modules/graceful-fs/graceful-fs.js39
-rw-r--r--deps/npm/node_modules/graceful-fs/package.json16
-rw-r--r--deps/npm/node_modules/inherits/LICENSE26
-rw-r--r--deps/npm/node_modules/inherits/README.md51
-rw-r--r--deps/npm/node_modules/inherits/inherits-old.js40
-rw-r--r--deps/npm/node_modules/inherits/inherits.js29
-rw-r--r--deps/npm/node_modules/inherits/package.json8
-rw-r--r--deps/npm/node_modules/ini/LICENSE23
-rw-r--r--deps/npm/node_modules/ini/README.md71
-rw-r--r--deps/npm/node_modules/ini/ini.js97
-rw-r--r--deps/npm/node_modules/ini/package.json24
-rw-r--r--deps/npm/node_modules/ini/test/fixtures/foo.ini18
-rw-r--r--deps/npm/node_modules/ini/test/foo.js40
-rw-r--r--deps/npm/node_modules/minimatch/.gitmodules3
-rw-r--r--deps/npm/node_modules/minimatch/LICENSE23
-rw-r--r--deps/npm/node_modules/minimatch/README.md107
-rw-r--r--deps/npm/node_modules/minimatch/minimatch.js399
-rw-r--r--deps/npm/node_modules/minimatch/node_modules/lru-cache/LICENSE23
-rw-r--r--deps/npm/node_modules/minimatch/node_modules/lru-cache/README.md12
-rw-r--r--deps/npm/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js151
-rw-r--r--deps/npm/node_modules/minimatch/node_modules/lru-cache/package.json13
-rw-r--r--deps/npm/node_modules/minimatch/package.json32
-rw-r--r--deps/npm/node_modules/minimatch/test/basic.js141
-rw-r--r--deps/npm/node_modules/mkdirp/LICENSE21
-rw-r--r--deps/npm/node_modules/mkdirp/README.markdown21
-rw-r--r--deps/npm/node_modules/mkdirp/examples/pow.js6
-rw-r--r--deps/npm/node_modules/mkdirp/index.js36
-rw-r--r--deps/npm/node_modules/mkdirp/package.json23
-rw-r--r--deps/npm/node_modules/mkdirp/test/chmod.js39
-rw-r--r--deps/npm/node_modules/mkdirp/test/clobber.js37
-rw-r--r--deps/npm/node_modules/mkdirp/test/mkdirp.js28
-rw-r--r--deps/npm/node_modules/mkdirp/test/race.js41
-rw-r--r--deps/npm/node_modules/mkdirp/test/rel.js32
-rw-r--r--deps/npm/node_modules/node-uuid/LICENSE.md3
-rw-r--r--deps/npm/node_modules/node-uuid/README.md100
-rw-r--r--deps/npm/node_modules/node-uuid/package.json12
-rw-r--r--deps/npm/node_modules/node-uuid/test/benchmark-native.c34
-rw-r--r--deps/npm/node_modules/node-uuid/test/benchmark.js27
-rw-r--r--deps/npm/node_modules/node-uuid/test/test.html14
-rw-r--r--deps/npm/node_modules/node-uuid/test/test.js83
-rw-r--r--deps/npm/node_modules/node-uuid/uuid.js80
-rw-r--r--deps/npm/node_modules/nopt/.gitignore0
-rw-r--r--deps/npm/node_modules/nopt/LICENSE23
-rw-r--r--deps/npm/node_modules/nopt/README.md208
-rwxr-xr-xdeps/npm/node_modules/nopt/bin/nopt.js44
-rwxr-xr-xdeps/npm/node_modules/nopt/examples/my-program.js30
-rw-r--r--deps/npm/node_modules/nopt/lib/nopt.js552
-rw-r--r--deps/npm/node_modules/nopt/package.json12
-rw-r--r--deps/npm/node_modules/proto-list/LICENSE23
-rw-r--r--deps/npm/node_modules/proto-list/README.md3
-rw-r--r--deps/npm/node_modules/proto-list/package.json9
-rw-r--r--deps/npm/node_modules/proto-list/proto-list.js94
-rw-r--r--deps/npm/node_modules/request/LICENSE55
-rw-r--r--deps/npm/node_modules/request/README.md193
-rw-r--r--deps/npm/node_modules/request/main.js506
-rw-r--r--deps/npm/node_modules/request/mimetypes.js146
-rw-r--r--deps/npm/node_modules/request/package.json15
-rw-r--r--deps/npm/node_modules/request/tests/googledoodle.pngbin0 -> 38510 bytes
-rwxr-xr-xdeps/npm/node_modules/request/tests/run.sh6
-rw-r--r--deps/npm/node_modules/request/tests/server.js57
-rw-r--r--deps/npm/node_modules/request/tests/test-body.js90
-rw-r--r--deps/npm/node_modules/request/tests/test-errors.js30
-rw-r--r--deps/npm/node_modules/request/tests/test-pipes.js167
-rw-r--r--deps/npm/node_modules/request/tests/test-timeout.js87
-rw-r--r--deps/npm/node_modules/rimraf/AUTHORS5
-rw-r--r--deps/npm/node_modules/rimraf/LICENSE23
-rw-r--r--deps/npm/node_modules/rimraf/README.md32
-rw-r--r--deps/npm/node_modules/rimraf/fiber.js86
-rw-r--r--deps/npm/node_modules/rimraf/package.json9
-rw-r--r--deps/npm/node_modules/rimraf/rimraf.js140
-rw-r--r--deps/npm/node_modules/rimraf/test/run.sh10
-rw-r--r--deps/npm/node_modules/rimraf/test/setup.sh47
-rw-r--r--deps/npm/node_modules/rimraf/test/test-async.js5
-rw-r--r--deps/npm/node_modules/rimraf/test/test-fiber.js15
-rw-r--r--deps/npm/node_modules/rimraf/test/test-sync.js3
-rw-r--r--deps/npm/node_modules/semver/LICENSE23
-rw-r--r--deps/npm/node_modules/semver/README.md119
-rwxr-xr-xdeps/npm/node_modules/semver/bin/semver71
-rw-r--r--deps/npm/node_modules/semver/package.json11
-rw-r--r--deps/npm/node_modules/semver/semver.js303
-rw-r--r--deps/npm/node_modules/semver/test.js397
-rw-r--r--deps/npm/node_modules/slide/.npmignore1
-rw-r--r--deps/npm/node_modules/slide/LICENSE23
-rw-r--r--deps/npm/node_modules/slide/README.md32
-rw-r--r--deps/npm/node_modules/slide/index.js1
-rw-r--r--deps/npm/node_modules/slide/lib/async-map-ordered.js65
-rw-r--r--deps/npm/node_modules/slide/lib/async-map.js56
-rw-r--r--deps/npm/node_modules/slide/lib/bind-actor.js16
-rw-r--r--deps/npm/node_modules/slide/lib/chain.js20
-rw-r--r--deps/npm/node_modules/slide/lib/slide.js3
-rw-r--r--deps/npm/node_modules/slide/package.json19
-rw-r--r--deps/npm/node_modules/tar/.gitignore4
-rw-r--r--deps/npm/node_modules/tar/README.md50
-rw-r--r--deps/npm/node_modules/tar/examples/extracter.js11
-rw-r--r--deps/npm/node_modules/tar/examples/reader.js36
-rw-r--r--deps/npm/node_modules/tar/lib/buffer-entry.js30
-rw-r--r--deps/npm/node_modules/tar/lib/entry-writer.js166
-rw-r--r--deps/npm/node_modules/tar/lib/entry.js212
-rw-r--r--deps/npm/node_modules/tar/lib/extended-header-writer.js165
-rw-r--r--deps/npm/node_modules/tar/lib/extended-header.js139
-rw-r--r--deps/npm/node_modules/tar/lib/extract.js64
-rw-r--r--deps/npm/node_modules/tar/lib/global-header-writer.js14
-rw-r--r--deps/npm/node_modules/tar/lib/header.js385
-rw-r--r--deps/npm/node_modules/tar/lib/pack.js212
-rw-r--r--deps/npm/node_modules/tar/lib/parse.js253
-rw-r--r--deps/npm/node_modules/tar/old/README.md1
-rw-r--r--deps/npm/node_modules/tar/old/doc/example.js24
-rw-r--r--deps/npm/node_modules/tar/old/generator.js387
-rw-r--r--deps/npm/node_modules/tar/old/parser.js344
-rw-r--r--deps/npm/node_modules/tar/old/tar.js74
-rw-r--r--deps/npm/node_modules/tar/old/test/test-generator.js13
-rw-r--r--deps/npm/node_modules/tar/old/test/test-generator.tarbin0 -> 3260 bytes
-rw-r--r--deps/npm/node_modules/tar/old/test/test-generator.txtbin0 -> 3261 bytes
-rw-r--r--deps/npm/node_modules/tar/old/test/test-parser.js28
-rw-r--r--deps/npm/node_modules/tar/old/test/test-tar.tarbin0 -> 3072 bytes
-rw-r--r--deps/npm/node_modules/tar/old/test/test-tar.txtbin0 -> 3073 bytes
-rw-r--r--deps/npm/node_modules/tar/package.json26
-rw-r--r--deps/npm/node_modules/tar/tar.js172
-rw-r--r--deps/npm/node_modules/tar/test/extract.js406
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/200.tarbin0 -> 3072 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/200L.hex50
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/200longlink.tarbin0 -> 3584 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/200longname.tarbin0 -> 3072 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/a.hex14
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/a.tarbin0 -> 2048 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/a.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/b.hex14
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/b.tarbin0 -> 2048 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/b.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/c.hex74
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/c.tarbin0 -> 18432 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/c.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/cc.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/foo.hex14
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/foo.js1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/foo.tarbin0 -> 2048 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/hardlink-11
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/hardlink-21
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/omega.hex22
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/omega.tarbin0 -> 3072 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/omega.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/omegapax.tarbin0 -> 5120 bytes
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/packtest/omega.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/packtest/star.4.html1184
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/packtest/Ω.txt1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc1
l---------deps/npm/node_modules/tar/test/fixtures/symlink1
-rw-r--r--deps/npm/node_modules/tar/test/fixtures/Ω.txt1
-rw-r--r--deps/npm/node_modules/tar/test/header.js183
-rw-r--r--deps/npm/node_modules/tar/test/pack.js953
-rw-r--r--deps/npm/node_modules/tar/test/parse.js359
-rw-r--r--deps/npm/node_modules/which/LICENSE23
-rw-r--r--deps/npm/node_modules/which/README.md5
-rwxr-xr-xdeps/npm/node_modules/which/bin/which14
-rw-r--r--deps/npm/node_modules/which/package.json17
-rw-r--r--deps/npm/node_modules/which/which.js67
-rw-r--r--deps/npm/package.json77
-rw-r--r--deps/npm/scripts/clean-old.sh165
-rwxr-xr-xdeps/npm/scripts/doc-build.sh71
-rw-r--r--deps/npm/scripts/index-build.js62
-rw-r--r--deps/npm/scripts/install.sh282
-rw-r--r--deps/npm/test/common.js7
-rw-r--r--deps/npm/test/disabled/bundlerecurs/package.json4
-rw-r--r--deps/npm/test/disabled/failer/package.json5
-rw-r--r--deps/npm/test/disabled/fast/package.json9
-rw-r--r--deps/npm/test/disabled/package-config/package.json4
-rwxr-xr-xdeps/npm/test/disabled/package-config/test.js17
-rw-r--r--deps/npm/test/disabled/slow/package.json9
-rw-r--r--deps/npm/test/disabled/startstop/package.json3
-rw-r--r--deps/npm/test/packages/npm-test-blerg/package.json4
-rw-r--r--deps/npm/test/packages/npm-test-blerg/test.js5
-rw-r--r--deps/npm/test/packages/npm-test-env-reader/package.json14
-rwxr-xr-xdeps/npm/test/packages/npm-test-env-reader/test.sh3
-rw-r--r--deps/npm/test/packages/npm-test-missing-bindir/package.json4
-rw-r--r--deps/npm/test/packages/npm-test-missing-bindir/test.js5
-rw-r--r--deps/npm/test/packages/npm-test-private/package.json4
-rw-r--r--deps/npm/test/packages/npm-test-test-package/package.json5
-rw-r--r--deps/npm/test/packages/npm-test-url-dep/package.json4
-rwxr-xr-xdeps/npm/test/run138
-rwxr-xr-xdeps/npm/test/update-test.sh59
-rw-r--r--tools/osx-pkg-postinstall.sh5
-rw-r--r--tools/osx-pkg.pmdoc/01local-contents.xml1
-rw-r--r--tools/osx-pkg.pmdoc/01local.xml2
-rw-r--r--tools/osx-pkg.pmdoc/02npm-contents.xml1
-rw-r--r--tools/osx-pkg.pmdoc/02npm.xml1
-rw-r--r--tools/osx-pkg.pmdoc/index.xml10
449 files changed, 33131 insertions, 4 deletions
diff --git a/LICENSE b/LICENSE
index d8d88d62cc..204ff4b93d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -78,3 +78,9 @@ The externally maintained libraries used by Node are:
- deps/zlib copyright 1995-2010 Jean-loup Gailly and Mark Adler
licensed under a permissive free software license. See
deps/zlib/LICENSE.
+
+ - deps/npm NPM is a package manager program copyright 2009, 2010, 2011
+ Isaac Z. Schlueter and licensed under MIT. NPM includes several
+ subpackages MIT or Apache licenses, see deps/npm/LICENSE for more
+ information. NPM is included in the Node .msi and .pkg distributions
+ but not in the Node binary itself.
diff --git a/deps/npm/.gitignore b/deps/npm/.gitignore
new file mode 100644
index 0000000000..e3424f5f6d
--- /dev/null
+++ b/deps/npm/.gitignore
@@ -0,0 +1,15 @@
+*.swp
+test/bin
+test/output.log
+test/packages/*/node_modules
+test/packages/npm-test-depends-on-spark/which-spark.log
+test/packages/test-package/random-data.txt
+test/root
+node_modules/ronn
+node_modules/.bin
+npm-debug.log
+html/api/*.html
+html/doc/*.html
+man/
+doc/*/index.md
+./npmrc
diff --git a/deps/npm/.gitmodules b/deps/npm/.gitmodules
new file mode 100644
index 0000000000..b629bd2936
--- /dev/null
+++ b/deps/npm/.gitmodules
@@ -0,0 +1,51 @@
+[submodule "node_modules/semver"]
+ path = node_modules/semver
+ url = https://github.com/isaacs/node-semver.git
+[submodule "node_modules/abbrev"]
+ path = node_modules/abbrev
+ url = https://github.com/isaacs/abbrev-js.git
+[submodule "node_modules/nopt"]
+ path = node_modules/nopt
+ url = https://github.com/isaacs/nopt.git
+[submodule "node_modules/node-uuid"]
+ path = node_modules/node-uuid
+ url = https://github.com/broofa/node-uuid
+[submodule "node_modules/minimatch"]
+ path = node_modules/minimatch
+ url = https://github.com/isaacs/minimatch.git
+[submodule "node_modules/graceful-fs"]
+ path = node_modules/graceful-fs
+ url = https://github.com/isaacs/node-graceful-fs.git
+[submodule "node_modules/slide"]
+ path = node_modules/slide
+ url = https://github.com/isaacs/slide-flow-control.git
+[submodule "node_modules/rimraf"]
+ path = node_modules/rimraf
+ url = https://github.com/isaacs/rimraf.git
+[submodule "node_modules/proto-list"]
+ path = node_modules/proto-list
+ url = https://github.com/isaacs/proto-list.git
+[submodule "node_modules/ini"]
+ path = node_modules/ini
+ url = https://github.com/isaacs/ini.git
+[submodule "node_modules/which"]
+ path = node_modules/which
+ url = https://github.com/isaacs/node-which.git
+[submodule "node_modules/request"]
+ path = node_modules/request
+ url = https://github.com/isaacs/request.git
+[submodule "node_modules/tar"]
+ path = node_modules/tar
+ url = git://github.com/isaacs/node-tar.git
+[submodule "node_modules/fstream"]
+ path = node_modules/fstream
+ url = git://github.com/isaacs/fstream.git
+[submodule "node_modules/inherits"]
+ path = node_modules/inherits
+ url = git://github.com/isaacs/inherits.git
+[submodule "node_modules/block-stream"]
+ path = node_modules/block-stream
+ url = git://github.com/isaacs/block-stream.git
+[submodule "node_modules/mkdirp"]
+ path = node_modules/mkdirp
+ url = git://github.com/isaacs/node-mkdirp.git
diff --git a/deps/npm/.npmignore b/deps/npm/.npmignore
new file mode 100644
index 0000000000..c0a5f571a1
--- /dev/null
+++ b/deps/npm/.npmignore
@@ -0,0 +1,11 @@
+*.swp
+test/bin
+test/output.log
+test/packages/*/node_modules
+test/packages/npm-test-depends-on-spark/which-spark.log
+test/packages/test-package/random-data.txt
+test/root
+node_modules/ronn
+node_modules/.bin
+npm-debug.log
+./npmrc
diff --git a/deps/npm/AUTHORS b/deps/npm/AUTHORS
new file mode 100644
index 0000000000..59d7257e81
--- /dev/null
+++ b/deps/npm/AUTHORS
@@ -0,0 +1,50 @@
+# Authors sorted by whether or not they're me
+Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)
+Steve Steiner <ssteinerX@gmail.com> (http://websaucesoftware.com/blog/)
+Mikeal Rogers <mikeal.rogers@gmail.com> (http://www.mikealrogers.com/)
+Aaron Blohowiak <aaron.blohowiak@gmail.com> (http://aaronblohowiak.com/)
+Martyn Smith <martyn@dollyfish.net.nz> (http://dollyfish.net.nz/)
+Mathias Pettersson <mape@mape.me> (http://mape.me/)
+Brian Hammond <brian@fictorial.com> (http://fictorial.com/)
+Charlie Robbins <charlie.robbins@gmail.com> (http://www.charlierobbins.com/)
+Francisco Treacy <francisco.treacy@gmail.com> (http://franciscotreacy.com/)
+Cliffano Subagio <cliffano@gmail.com> (http://blog.cliffano.com/)
+Christian Eager <christian.eager@nokia.com> (http://perpenduum.com)
+Dav Glass <davglass@gmail.com> (http://blog.davglass.com)
+Alex K. Wolfe <alexkwolfe@gmail.com>
+James Sanders <jimmyjazz14@gmail.com> (http://james-sanders.com/)
+Reid Burke <me@reidburke.com> (http://reidburke.com/)
+Arlo Breault <arlolra@gmail.com> (http://thoughtherder.com/)
+Timo Derstappen <teemow@gmail.com> (http://teemow.com)
+Bradley Meck <bradley.meck@gmail.com>
+Bart Teeuwisse <bart.teeuwisse@thecodemill.biz> (http://thecodemill.biz/)
+Ben Noordhuis <info@bnoordhuis.nl> (http://bnoordhuis.nl/)
+Tor Valamo <tor.valamo@gmail.com> (http://www.magnimedia.no/)
+Whyme.Lyu <5longluna@gmail.com> (http://whyme.kuantu.com/)
+Olivier Melcher <olivier.melcher@gmail.com>
+Tomaž Muraus <kami@k5-storitve.net> (http://www.tomaz-muraus.info)
+Evan Meagher <evan.meagher@gmail.com> (http://evanmeagher.net/)
+Orlando Vazquez <ovazquez@gmail.com> (http://2wycked.net/)
+George Miroshnykov <gmiroshnykov@lohika.com>
+Geoff Flarity (http://ca.linkedin.com/pub/geoff-flarity/a/536/43a)
+Pete Kruckenberg <pete@kruckenberg.com>
+Laurie Harper <laurie@holoweb.net> (http://laurie.holoweb.net/)
+Chris Wong <chris@chriswongstudio.com>
+Max Goodman <c@chromacode.com> (http://chromacode.com/)
+Scott Bronson <brons_github@rinspin.com>
+Federico Romero <federomero@gmail.com>
+Visnu Pitiyanuvath <visnupx@gmail.com> (http://visnup.com)
+Irakli Gozalishvili <rfobic@gmail.com> (http://jeditoolkit.com/)
+Mark Cahill <mark@tiemonster.info> (http://www.tiemonster.info/)
+Zearin <zearin@gonk.net>
+Iain Sproat <iainsproat@gmail.com>
+Trent Mick <trentm@gmail.com> (http://trentm.com/)
+Felix Geisendörfer <felix@debuggable.com> (http://www.debuggable.com/)
+Conny Brunnkvist <cbrunnkvist@gmail.com> (http://twitter.com/connyb)
+Will Elwood <w.elwood08@gmail.com> (https://github.com/welwood08)
+Oleg Efimov <efimovov@gmail.com> (http://sannis.ru)
+Martin Cooper <mfncooper@gmail.com>
+Jameson Little <t.jameson.little@gmail.com>
+cspotcode <cspotcode@gmail.com>
+Maciej Małecki <maciej.malecki@notimplemented.org>
+Stephen Sugden <glurgle@gmail.com>
diff --git a/deps/npm/CHANGES b/deps/npm/CHANGES
new file mode 120000
index 0000000000..6b73a61615
--- /dev/null
+++ b/deps/npm/CHANGES
@@ -0,0 +1 @@
+doc/cli/changelog.md \ No newline at end of file
diff --git a/deps/npm/LICENSE b/deps/npm/LICENSE
new file mode 100644
index 0000000000..71ca385bdf
--- /dev/null
+++ b/deps/npm/LICENSE
@@ -0,0 +1,61 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter (the "Author")
+All rights reserved.
+
+MIT +no-false-attribs License
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+Distributions of all or part of the Software intended to be used
+by the recipients as they would use the unmodified Software,
+containing modifications that substantially alter, remove, or
+disable functionality of the Software, outside of the documented
+configuration mechanisms provided by the Software, shall be
+modified such that the Author's bug reporting email addresses and
+urls are either replaced with the contact information of the
+parties responsible for the changes, or removed entirely.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
+
+
+Except where noted, this license applies to any and all software
+programs and associated documentation files created by the
+Author, when distributed with the Software.
+
+"Node.js" and "node" trademark Joyent, Inc. npm is not officially
+part of the Node.js project, and is neither owned by nor
+officially affiliated with Joyent, Inc.
+
+Packages published in the npm registry are not part of npm
+itself, are the sole property of their respective maintainers,
+and are not covered by this license.
+
+"npm Logo" created by Mathias Pettersson and Brian Hammond,
+used with permission.
+
+This program includes a BSDTar/LibArchive version 2.8.3-1 binary,
+originally distributed as part of the MinGW suite, compiled for
+Win32, according to the terms of the BSD license.
+See deps/basic-bsdtar-2.8.3-1-ming32-bin/basic-bsdtar.LICENSE.
+
+This program uses "node-uuid", Copyright (c) 2010 Robert Kieffer,
+according to the terms of the MIT license.
+
+This program uses "request", Copyright (c) 2011 Mikeal Rogers,
+according to the terms of the Apache license.
diff --git a/deps/npm/Makefile b/deps/npm/Makefile
new file mode 100644
index 0000000000..e10381baf7
--- /dev/null
+++ b/deps/npm/Makefile
@@ -0,0 +1,125 @@
+SHELL = bash
+
+markdowns = $(shell find doc -name '*.md' | grep -v 'index') README.md
+
+cli_mandocs = $(shell find doc/cli -name '*.md' \
+ |sed 's|.md|.1|g' \
+ |sed 's|doc/cli/|man/man1/|g' ) \
+ man/man1/README.1 \
+ man/man1/index.1
+
+api_mandocs = $(shell find doc/api -name '*.md' \
+ |sed 's|.md|.3|g' \
+ |sed 's|doc/api/|man/man3/|g' )
+
+cli_htmldocs = $(shell find doc/cli -name '*.md' \
+ |grep -v 'index.md' \
+ |sed 's|.md|.html|g' \
+ |sed 's|doc/cli/|html/doc/|g' ) \
+ html/doc/README.html \
+ html/doc/index.html
+
+api_htmldocs = $(shell find doc/api -name '*.md' \
+ |sed 's|.md|.html|g' \
+ |sed 's|doc/api/|html/api/|g' )
+
+mandocs = $(api_mandocs) $(cli_mandocs)
+
+htmldocs = $(api_htmldocs) $(cli_htmldocs)
+
+all: submodules doc
+
+submodules:
+ ! [ -d .git ] || git submodule update --init --recursive
+
+latest: submodules
+ @echo "Installing latest published npm"
+ @echo "Use 'make install' or 'make link' to install the code"
+ @echo "in this folder that you're looking at right now."
+ node cli.js install -g -f npm
+
+install: all
+ node cli.js install -g -f
+
+# backwards compat
+dev: install
+
+link: uninstall
+ node cli.js link -f
+
+clean: doc-clean uninstall
+ rm npmrc
+ node cli.js cache clean
+
+uninstall: submodules
+ node cli.js rm npm -g -f
+
+doc: $(mandocs) $(htmldocs)
+
+docclean: doc-clean
+doc-clean:
+ rm -rf \
+ node_modules/ronn \
+ node_modules/.bin/ronn \
+ .building_ronn \
+ doc/cli/index.md \
+ doc/api/index.md \
+ $(api_mandocs) \
+ $(cli_mandocs) \
+ $(api_htmldocs) \
+ $(cli_htmldocs) \
+ &>/dev/null || true
+
+# use `npm install ronn` for this to work.
+man/man1/README.1: README.md scripts/doc-build.sh package.json
+ scripts/doc-build.sh $< $@
+
+man/man1/%.1: doc/cli/%.md scripts/doc-build.sh package.json
+ @[ -d man/man1 ] || mkdir -p man/man1
+ scripts/doc-build.sh $< $@
+
+man/man3/%.3: doc/api/%.md scripts/doc-build.sh package.json
+ @[ -d man/man3 ] || mkdir -p man/man3
+ scripts/doc-build.sh $< $@
+
+html/doc/README.html: README.md html/dochead.html html/docfoot.html scripts/doc-build.sh package.json
+ scripts/doc-build.sh $< $@
+
+html/doc/%.html: doc/cli/%.md html/dochead.html html/docfoot.html scripts/doc-build.sh package.json
+ scripts/doc-build.sh $< $@
+
+html/api/%.html: doc/api/%.md html/dochead.html html/docfoot.html scripts/doc-build.sh package.json
+ scripts/doc-build.sh $< $@
+
+doc/cli/index.md: $(markdowns) scripts/index-build.js scripts/doc-build.sh package.json
+ node scripts/index-build.js > $@
+
+node_modules/ronn:
+ node cli.js install https://github.com/isaacs/ronnjs/tarball/master
+
+doc: man
+
+man: $(cli_docs) $(api_docs)
+
+test: submodules
+ node cli.js test
+
+version: link
+ git add package.json &&\
+ git ci -m v$(shell npm -v)
+
+publish: link
+ git tag -s -m v$(shell npm -v) v$(shell npm -v) &&\
+ git push origin master --tags &&\
+ npm publish &&\
+ make doc-publish
+
+docpublish: doc-publish
+doc-publish: doc
+ rsync -vazu --stats --no-implied-dirs --delete html/doc/ npmjs.org:/var/www/npmjs.org/public/doc
+ rsync -vazu --stats --no-implied-dirs --delete html/api/ npmjs.org:/var/www/npmjs.org/public/api
+
+sandwich:
+ @[ $$(whoami) = "root" ] && (echo "ok"; echo "ham" > sandwich) || echo "make it yourself"
+
+.PHONY: all latest install dev link doc clean uninstall test man doc-publish doc-clean docclean docpublish
diff --git a/deps/npm/README.md b/deps/npm/README.md
new file mode 100644
index 0000000000..df1fdfc25c
--- /dev/null
+++ b/deps/npm/README.md
@@ -0,0 +1,274 @@
+npm(1) -- node package manager
+==============================
+
+## SYNOPSIS
+
+This is just enough info to get you up and running.
+
+Much more info available via `npm help` once it's installed.
+
+## IMPORTANT
+
+**You need node v0.4 or higher to run this program.**
+
+To install an old **and unsupported** version of npm that works on node 0.3
+and prior, clone the git repo and dig through the old tags and branches.
+
+## Simple Install (Unix only, sorry)
+
+To install npm with one command, do this:
+
+ curl http://npmjs.org/install.sh | sh
+
+To skip the npm 0.x cleanup, do this:
+
+ curl http://npmjs.org/install.sh | clean=no sh
+
+To say "yes" to the 0.x cleanup, but skip the prompt:
+
+ curl http://npmjs.org/install.sh | clean=yes sh
+
+If you get permission errors, see the section below, entitled
+"Permission Errors on Installation".
+
+## Installing on Windows -- Experimental
+
+Yes, this sucks. A convenient one-liner is coming soon.
+
+### Step 1: Drop the node.exe somewhere
+
+You will probably need the latest version of node, **at least** version
+`0.5.8` or higher. You can get it from
+<http://nodejs.org/dist/v0.5.8/node.exe>.
+
+### Step 2 (optional): Update the %PATH% environment variable
+
+Update your `%PATH%` environment variable in System Properties:
+Advanced: Environment, so that it includes the `bin` folder you chose.
+The entries are separated by semicolons.
+
+You *may* be able to do this from the command line using `set` and
+`setx`. `cd` into the `bin` folder you created in step 1, and do this:
+
+ set path=%PATH%;%CD%
+ setx path "%PATH%"
+
+This will have the added advantage that you'll be able to simply type
+`npm` or `node` in any project folder to access those commands.
+
+If you decide not to update the PATH, and put the node.exe file in
+`C:\node\node.exe`, then the npm executable will end up `C:\node\npm.cmd`,
+and you'll have to type `C:\node\npm <command>` to use it.
+
+### Step 3: Install git
+
+If you don't already have git,
+[install it](https://git.wiki.kernel.org/index.php/MSysGit:InstallMSysGit).
+
+Run `git --version` to make sure that it's at least version 1.7.6.
+
+### Step 4: install npm
+
+Lastly, **after** node.exe, git, and your %PATH% have *all* been set up
+properly, install npm itself:
+
+ git config --system http.sslcainfo /bin/curl-ca-bundle.crt
+ git clone --recursive git://github.com/isaacs/npm.git
+ cd npm
+ node cli.js install npm -gf
+
+## Permission Errors (`EACCES` or `EACCESS`) on Installation
+
+On Windows, you may need to run the command prompt in elevated
+permission mode. (Right-click on cmd.exe, Run as Administrator.)
+
+On Unix, you may need to run as root, or use `sudo`.
+
+**Note**: You would need to `sudo` the `sh`, **not** the `curl`. Fetching
+stuff from the internet typically doesn't require elevated permissions.
+Running it might.
+
+I highly recommend that you first download the file, and make sure that
+it is what you expect, and *then* run it.
+
+ curl -O http://npmjs.org/install.sh
+ # inspect file..
+ sudo sh install.sh
+
+## Installing on Cygwin
+
+No.
+
+## Dev Install
+
+To install the latest **unstable** development version from git:
+
+ git clone https://github.com/isaacs/npm.git
+ cd npm
+ git submodule update --init --recursive
+ sudo make install # (or: `node cli.js install -gf`)
+
+If you're sitting in the code folder reading this document in your
+terminal, then you've already got the code. Just do:
+
+ git submodule update --init --recursive
+ sudo make install
+
+and npm will install itself.
+
+If you don't have make, and don't have curl or git, and ALL you have is
+this code and node, you can probably do this:
+
+ git submodule update --init --recursive
+ sudo node ./cli.js install -g
+
+Note that github tarballs **do not contain submodules**, so
+those won't work. You'll have to also fetch the appropriate submodules
+listed in the .gitmodules file.
+
+## Permissions when Using npm to Install Other Stuff
+
+**tl;dr**
+
+* Use `sudo` for greater safety. Or don't, if you prefer not to.
+* npm will downgrade permissions if it's root before running any build
+ scripts that package authors specified.
+
+### More details...
+
+As of version 0.3, it is recommended to run npm as root.
+This allows npm to change the user identifier to the `nobody` user prior
+to running any package build or test commands.
+
+If you are not the root user, or if you are on a platform that does not
+support uid switching, then npm will not attempt to change the userid.
+
+If you would like to ensure that npm **always** runs scripts as the
+"nobody" user, and have it fail if it cannot downgrade permissions, then
+set the following configuration param:
+
+ npm config set unsafe-perm false
+
+This will prevent running in unsafe mode, even as non-root users.
+
+## Uninstalling
+
+So sad to see you go.
+
+ sudo npm uninstall npm -g
+
+Or, if that fails,
+
+ sudo make uninstall
+
+## More Severe Uninstalling
+
+Usually, the above instructions are sufficient. That will remove
+npm, but leave behind anything you've installed.
+
+If you would like to remove all the packages that you have installed,
+then you can use the `npm ls` command to find them, and then `npm rm` to
+remove them.
+
+To remove cruft left behind by npm 0.x, you can use the included
+`clean-old.sh` script file. You can run it conveniently like this:
+
+ npm explore npm -g -- sh scripts/clean-old.sh
+
+npm uses two configuration files, one for per-user configs, and another
+for global (every-user) configs. You can view them by doing:
+
+ npm config get userconfig # defaults to ~/.npmrc
+ npm config get globalconfig # defaults to /usr/local/etc/npmrc
+
+Uninstalling npm does not remove configuration files by default. You
+must remove them yourself manually if you want them gone. Note that
+this means that future npm installs will not remember the settings that
+you have chosen.
+
+## Using npm Programmatically
+
+If you would like to use npm programmatically, you can do that.
+It's not very well documented, but it *is* rather simple.
+
+ var npm = require("npm")
+ npm.load(myConfigObject, function (er) {
+    if (er) return handleError(er)
+ npm.commands.install(["some", "args"], function (er, data) {
+ if (er) return commandFailed(er)
+ // command succeeded, and data might have some info
+ })
+ npm.on("log", function (message) { .... })
+ })
+
+The `load` function takes an object hash of the command-line configs.
+The various `npm.commands.<cmd>` functions take an **array** of
+positional argument **strings**. The last argument to any
+`npm.commands.<cmd>` function is a callback. Some commands take other
+optional arguments. Read the source.
+
+You cannot set configs individually for any single npm function at this
+time. Since `npm` is a singleton, any call to `npm.config.set` will
+change the value for *all* npm commands in that process.
+
+See `./bin/npm-cli.js` for an example of pulling config values off of the
+command line arguments using nopt. You may also want to check out `npm
+help config` to learn about all the options you can set there.
+
+## More Docs
+
+Check out the [docs](http://npmjs.org/doc/),
+especially the
+[faq](http://npmjs.org/doc/faq.html).
+
+You can use the `npm help` command to read any of them.
+
+If you're a developer, and you want to use npm to publish your program,
+you should
+[read this](http://npmjs.org/doc/developers.html)
+
+## Legal Stuff
+
+"npm" and "the npm registry" are owned by Isaac Z. Schlueter. All
+rights not explicitly granted in the MIT license are reserved. See the
+included LICENSE file for more details.
+
+"Node.js" and "node" are trademarks owned by Joyent, Inc. npm is not
+officially part of the Node.js project, and is neither owned by nor
+officially affiliated with Joyent, Inc.
+
+The packages in the npm registry are not part of npm itself, and are the
+sole property of their respective maintainers. While every effort is
+made to ensure accountability, there is absolutely no guarantee,
+warranty, or assertion made as to the quality, fitness for a specific
+purpose, or lack of malice in any given npm package. Modules
+published on the npm registry are not affiliated with or endorsed by
+Joyent, Inc., Isaac Z. Schlueter, Ryan Dahl, or the Node.js project.
+
+If you have a complaint about a package in the npm registry, and cannot
+resolve it with the package owner, please express your concerns to
+Isaac Z. Schlueter at <i@izs.me>.
+
+### In plain english
+
+This is mine; not my employer's, not Node's, not Joyent's, not Ryan
+Dahl's.
+
+If you publish something, it's yours, and you are solely accountable
+for it. Not me, not Node, not Joyent, not Ryan Dahl.
+
+If other people publish something, it's theirs. Not mine, not Node's,
+not Joyent's, not Ryan Dahl's.
+
+Yes, you can publish something evil. It will be removed promptly if
+reported, and we'll lose respect for you. But there is no vetting
+process for published modules.
+
+If this concerns you, inspect the source before using packages.
+
+## SEE ALSO
+
+* npm(1)
+* npm-faq(1)
+* npm-help(1)
+* npm-index(1)
diff --git a/deps/npm/bin/npm-cli.js b/deps/npm/bin/npm-cli.js
new file mode 100755
index 0000000000..db6db2fbd2
--- /dev/null
+++ b/deps/npm/bin/npm-cli.js
@@ -0,0 +1,77 @@
+#!/usr/bin/env node
+;(function () { // wrapper in case we're in module_context mode
+
+// windows: running "npm blah" in this folder will invoke WSH, not node.
+if (typeof WScript !== "undefined") {
+ WScript.echo("npm does not work when run\n"
+ +"with the Windows Scripting Host\n\n"
+ +"'cd' to a different directory,\n"
+ +"or type 'npm.cmd <args>',\n"
+ +"or type 'node npm <args>'.")
+ WScript.quit(1)
+ return
+}
+
+var log = require("../lib/utils/log.js")
+log.waitForConfig()
+log.info("ok", "it worked if it ends with")
+
+var fs = require("graceful-fs")
+ , path = require("path")
+ , npm = require("../lib/npm.js")
+ , ini = require("../lib/utils/ini.js")
+ , errorHandler = require("../lib/utils/error-handler.js")
+
+ , configDefs = require("../lib/utils/config-defs.js")
+ , shorthands = configDefs.shorthands
+ , types = configDefs.types
+ , nopt = require("nopt")
+
+// if npm is called as "npmg" or "npm_g", then
+// run in global mode.
+if (path.basename(process.argv[1]).slice(-1) === "g") {
+ process.argv.splice(1, 1, "npm", "-g")
+}
+
+log.verbose(process.argv, "cli")
+
+var conf = nopt(types, shorthands)
+npm.argv = conf.argv.remain
+if (npm.deref(npm.argv[0])) npm.command = npm.argv.shift()
+else conf.usage = true
+
+
+if (conf.version) {
+ console.log(npm.version)
+ return
+}
+
+log.info("npm@"+npm.version, "using")
+log.info("node@"+process.version, "using")
+
+// make sure that this version of node works with this version of npm.
+var semver = require("semver")
+ , nodeVer = process.version
+ , reqVer = npm.nodeVersionRequired
+if (reqVer && !semver.satisfies(nodeVer, reqVer)) {
+ return errorHandler(new Error(
+ "npm doesn't work with node " + nodeVer
+ + "\nRequired: node@" + reqVer), true)
+}
+
+process.on("uncaughtException", errorHandler)
+
+if (conf.usage && npm.command !== "help") {
+ npm.argv.unshift(npm.command)
+ npm.command = "help"
+}
+
+// now actually fire up npm and run the command.
+// this is how to use npm programmatically:
+conf._exit = true
+npm.load(conf, function (er) {
+ if (er) return errorHandler(er)
+ npm.commands[npm.command](npm.argv, errorHandler)
+})
+
+})()
diff --git a/deps/npm/bin/npm-g.cmd b/deps/npm/bin/npm-g.cmd
new file mode 100644
index 0000000000..bac9e5f1c4
--- /dev/null
+++ b/deps/npm/bin/npm-g.cmd
@@ -0,0 +1,6 @@
+:: Created by npm, please don't edit manually.
+@IF EXIST "%~dp0"\"node.exe" (
+ "%~dp0"\"node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) ELSE (
+ node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) \ No newline at end of file
diff --git a/deps/npm/bin/npm-get-uid-gid.js b/deps/npm/bin/npm-get-uid-gid.js
new file mode 100755
index 0000000000..390e0f2fc9
--- /dev/null
+++ b/deps/npm/bin/npm-get-uid-gid.js
@@ -0,0 +1,16 @@
+var argv = process.argv.slice(2)
+ , user = argv[0] || process.getuid()
+ , group = argv[1] || process.getgid()
+
+if (!isNaN(user)) user = +user
+if (!isNaN(group)) group = +group
+
+console.error([user, group])
+
+try {
+ process.setgid(group)
+ process.setuid(user)
+ console.log(JSON.stringify({uid:+process.getuid(), gid:+process.getgid()}))
+} catch (ex) {
+ console.log(JSON.stringify({error:ex.message,errno:ex.errno}))
+}
diff --git a/deps/npm/bin/npm.cmd b/deps/npm/bin/npm.cmd
new file mode 100644
index 0000000000..bac9e5f1c4
--- /dev/null
+++ b/deps/npm/bin/npm.cmd
@@ -0,0 +1,6 @@
+:: Created by npm, please don't edit manually.
+@IF EXIST "%~dp0"\"node.exe" (
+ "%~dp0"\"node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) ELSE (
+ node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) \ No newline at end of file
diff --git a/deps/npm/bin/npm_g.cmd b/deps/npm/bin/npm_g.cmd
new file mode 100644
index 0000000000..bac9e5f1c4
--- /dev/null
+++ b/deps/npm/bin/npm_g.cmd
@@ -0,0 +1,6 @@
+:: Created by npm, please don't edit manually.
+@IF EXIST "%~dp0"\"node.exe" (
+ "%~dp0"\"node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) ELSE (
+ node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+) \ No newline at end of file
diff --git a/deps/npm/bin/read-package-json.js b/deps/npm/bin/read-package-json.js
new file mode 100755
index 0000000000..8c95d86e8b
--- /dev/null
+++ b/deps/npm/bin/read-package-json.js
@@ -0,0 +1,22 @@
+var argv = process.argv
+if (argv.length < 3) {
+ console.error("Usage: read-package.json <file> [<fields> ...]")
+ process.exit(1)
+}
+
+var fs = require("fs")
+ , file = argv[2]
+ , readJson = require("../lib/utils/read-json")
+
+readJson(file, function (er, data) {
+ if (er) throw er
+ if (argv.length === 3) console.log(data)
+ else argv.slice(3).forEach(function (field) {
+ field = field.split(".")
+ var val = data
+ field.forEach(function (f) {
+ val = val[f]
+ })
+ console.log(val)
+ })
+})
diff --git a/deps/npm/cli.js b/deps/npm/cli.js
new file mode 100755
index 0000000000..0df931e35a
--- /dev/null
+++ b/deps/npm/cli.js
@@ -0,0 +1,2 @@
+#!/usr/bin/env node
+require("./bin/npm-cli.js")
diff --git a/deps/npm/configure b/deps/npm/configure
new file mode 100755
index 0000000000..b13c8d0d73
--- /dev/null
+++ b/deps/npm/configure
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# set configurations that will be "sticky" on this system,
+# surviving npm self-updates.
+
+CONFIGS=()
+i=0
+
+# get the location of this file.
+unset CDPATH
+CONFFILE=$(cd $(dirname "$0"); pwd -P)/npmrc
+
+while [ $# -gt 0 ]; do
+ conf="$1"
+ case $conf in
+ --help)
+ echo "./configure --param=value ..."
+ exit 0
+ ;;
+ --*)
+ CONFIGS[$i]="${conf:2}"
+ ;;
+ *)
+ CONFIGS[$i]="$conf"
+ ;;
+ esac
+ let i++
+ shift
+done
+
+for c in "${CONFIGS[@]}"; do
+ echo "$c" >> "$CONFFILE"
+done
diff --git a/deps/npm/doc/api/author.md b/deps/npm/doc/api/author.md
new file mode 120000
index 0000000000..b7a53cb66b
--- /dev/null
+++ b/deps/npm/doc/api/author.md
@@ -0,0 +1 @@
+owner.md \ No newline at end of file
diff --git a/deps/npm/doc/api/bin.md b/deps/npm/doc/api/bin.md
new file mode 100644
index 0000000000..f3dc48286d
--- /dev/null
+++ b/deps/npm/doc/api/bin.md
@@ -0,0 +1,13 @@
+npm-bin(3) -- Display npm bin folder
+====================================
+
+## SYNOPSIS
+
+ npm.commands.bin(args, cb)
+
+## DESCRIPTION
+
+Print the folder where npm will install executables.
+
+This function should not be used programmatically. Instead, just refer
+to the `npm.bin` member.
diff --git a/deps/npm/doc/api/bugs.md b/deps/npm/doc/api/bugs.md
new file mode 100644
index 0000000000..cc4db8f9ec
--- /dev/null
+++ b/deps/npm/doc/api/bugs.md
@@ -0,0 +1,19 @@
+npm-bugs(3) -- Bugs for a package in a web browser maybe
+========================================================
+
+## SYNOPSIS
+
+ npm.commands.bugs(package, callback)
+
+## DESCRIPTION
+
+This command tries to guess at the likely location of a package's
+bug tracker URL, and then tries to open it using the `--browser`
+config param.
+
+Like other commands, the first parameter is an array. This command only
+uses the first element, which is expected to be a package name with an
+optional version number.
+
+This command will launch a browser, so this command may not be the most
+friendly for programmatic use.
diff --git a/deps/npm/doc/api/commands.md b/deps/npm/doc/api/commands.md
new file mode 100644
index 0000000000..eb7545639d
--- /dev/null
+++ b/deps/npm/doc/api/commands.md
@@ -0,0 +1,22 @@
+npm-commands(3) -- npm commands
+===============================
+
+## SYNOPSIS
+
+ npm.commands[<command>](args, callback)
+
+## DESCRIPTION
+
+npm comes with a full set of commands, and each of the commands takes a
+similar set of arguments.
+
+In general, all commands on the command object take an **array** of positional
+argument **strings**. The last argument to any function is a callback. Some
+commands are special and take other optional arguments.
+
+All commands have their own man page. See `man npm-<command>` for command-line
+usage, or `man 3 npm-<command>` for programmatic usage.
+
+## SEE ALSO
+
+* npm-index(1)
diff --git a/deps/npm/doc/api/config.md b/deps/npm/doc/api/config.md
new file mode 100644
index 0000000000..7ae2274281
--- /dev/null
+++ b/deps/npm/doc/api/config.md
@@ -0,0 +1,45 @@
+npm-config(3) -- Manage the npm configuration files
+===================================================
+
+## SYNOPSIS
+
+ npm.commands.config(args, callback)
+ var val = npm.config.get(key)
+ npm.config.set(key, val)
+
+## DESCRIPTION
+
+This function acts much the same way as the command-line version. The first
+element in the array tells config what to do. Possible values are:
+
+* `set`
+
+ Sets a config parameter. The second element in `args` is interpreted as the
+ key, and the third element is interpreted as the value.
+
+* `get`
+
+ Gets the value of a config parameter. The second element in `args` is the
+ key to get the value of.
+
+* `delete` (`rm` or `del`)
+
+ Deletes a parameter from the config. The second element in `args` is the
+ key to delete.
+
+* `list` (`ls`)
+
+ Show all configs that aren't secret. No parameters necessary.
+
+* `edit`:
+
+ Opens the config file in the default editor. This command isn't very useful
+ programmatically, but it is made available.
+
+To programmatically access npm configuration settings, or set them for
+the duration of a program, use the `npm.config.set` and `npm.config.get`
+functions instead.
+
+## SEE ALSO
+
+* npm(3)
diff --git a/deps/npm/doc/api/deprecate.md b/deps/npm/doc/api/deprecate.md
new file mode 100644
index 0000000000..ac94fd7a9f
--- /dev/null
+++ b/deps/npm/doc/api/deprecate.md
@@ -0,0 +1,32 @@
+npm-deprecate(3) -- Deprecate a version of a package
+====================================================
+
+## SYNOPSIS
+
+ npm.commands.deprecate(args, callback)
+
+## DESCRIPTION
+
+This command will update the npm registry entry for a package, providing
+a deprecation warning to all who attempt to install it.
+
+The 'args' parameter must have exactly two elements:
+
+* `package[@version]`
+
+ The `version` portion is optional, and may be either a range, or a
+ specific version, or a tag.
+
+* `message`
+
+ The warning message that will be printed whenever a user attempts to
+ install the package.
+
+Note that you must be the package owner to deprecate something. See the
+`owner` and `adduser` help topics.
+
+## SEE ALSO
+
+* npm-publish(3)
+* npm-unpublish(3)
+* npm-registry(1)
diff --git a/deps/npm/doc/api/docs.md b/deps/npm/doc/api/docs.md
new file mode 100644
index 0000000000..2c5fc5e632
--- /dev/null
+++ b/deps/npm/doc/api/docs.md
@@ -0,0 +1,19 @@
+npm-docs(3) -- Docs for a package in a web browser maybe
+========================================================
+
+## SYNOPSIS
+
+ npm.commands.docs(package, callback)
+
+## DESCRIPTION
+
+This command tries to guess at the likely location of a package's
+documentation URL, and then tries to open it using the `--browser`
+config param.
+
+Like other commands, the first parameter is an array. This command only
+uses the first element, which is expected to be a package name with an
+optional version number.
+
+This command will launch a browser, so this command may not be the most
+friendly for programmatic use.
diff --git a/deps/npm/doc/api/edit.md b/deps/npm/doc/api/edit.md
new file mode 100644
index 0000000000..b13fbb8578
--- /dev/null
+++ b/deps/npm/doc/api/edit.md
@@ -0,0 +1,24 @@
+npm-edit(3) -- Edit an installed package
+========================================
+
+## SYNOPSIS
+
+ npm.commands.edit(package, callback)
+
+## DESCRIPTION
+
+Opens the package folder in the default editor (or whatever you've
+configured as the npm `editor` config -- see `npm help config`.)
+
+After it has been edited, the package is rebuilt so as to pick up any
+changes in compiled packages.
+
+For instance, you can do `npm install connect` to install connect
+into your package, and then `npm.commands.edit(["connect"], callback)`
+to make a few changes to your locally installed copy.
+
+The first parameter is a string array with a single element, the package
+to open. The package can optionally have a version number attached.
+
+Since this command opens an editor in a new process, be careful about where
+and how this is used.
diff --git a/deps/npm/doc/api/explore.md b/deps/npm/doc/api/explore.md
new file mode 100644
index 0000000000..a239f3df31
--- /dev/null
+++ b/deps/npm/doc/api/explore.md
@@ -0,0 +1,18 @@
+npm-explore(3) -- Browse an installed package
+=============================================
+
+## SYNOPSIS
+
+ npm.commands.explore(args, callback)
+
+## DESCRIPTION
+
+Spawn a subshell in the directory of the installed package specified.
+
+If a command is specified, then it is run in the subshell, which then
+immediately terminates.
+
+Note that the package is *not* automatically rebuilt afterwards, so be
+sure to use `npm rebuild <pkg>` if you make any changes.
+
+The first element in the 'args' parameter must be a package name. After that is the optional command, which can be any number of strings. All of the strings will be combined into one, space-delimited command.
diff --git a/deps/npm/doc/api/find.md b/deps/npm/doc/api/find.md
new file mode 120000
index 0000000000..5b3debb8f1
--- /dev/null
+++ b/deps/npm/doc/api/find.md
@@ -0,0 +1 @@
+ls.md \ No newline at end of file
diff --git a/deps/npm/doc/api/get.md b/deps/npm/doc/api/get.md
new file mode 120000
index 0000000000..3dc8737366
--- /dev/null
+++ b/deps/npm/doc/api/get.md
@@ -0,0 +1 @@
+config.md \ No newline at end of file
diff --git a/deps/npm/doc/api/help-search.md b/deps/npm/doc/api/help-search.md
new file mode 100644
index 0000000000..5c00cfc177
--- /dev/null
+++ b/deps/npm/doc/api/help-search.md
@@ -0,0 +1,30 @@
+npm-help-search(3) -- Search the help pages
+===========================================
+
+## SYNOPSIS
+
+ npm.commands.helpSearch(args, [silent,] callback)
+
+## DESCRIPTION
+
+This command is rarely useful, but it exists in the rare case that it is.
+
+This command takes an array of search terms and returns the help pages that
+match in order of best match.
+
+If there is only one match, then npm displays that help section. If there
+are multiple results, the results are printed to the screen formatted and the
+array of results is returned. Each result is an object with these properties:
+
+* hits:
+ A map of args to number of hits on that arg. For example, {"npm": 3}
+* found:
+ Total number of unique args that matched.
+* totalHits:
+ Total number of hits.
+* lines:
+ An array of all matching lines (and some adjacent lines).
+* file:
+ Name of the file that matched
+
+The silent parameter is currently not used, but it may be in the future.
diff --git a/deps/npm/doc/api/home.md b/deps/npm/doc/api/home.md
new file mode 120000
index 0000000000..8828313f5b
--- /dev/null
+++ b/deps/npm/doc/api/home.md
@@ -0,0 +1 @@
+docs.md \ No newline at end of file
diff --git a/deps/npm/doc/api/init.md b/deps/npm/doc/api/init.md
new file mode 100644
index 0000000000..5afc11b3ba
--- /dev/null
+++ b/deps/npm/doc/api/init.md
@@ -0,0 +1,29 @@
+npm init(3) -- Interactively create a package.json file
+=======================================================
+
+## SYNOPSIS
+
+ npm.commands.init(args, callback)
+
+## DESCRIPTION
+
+This will ask you a bunch of questions, and then write a package.json for you.
+
+It attempts to make reasonable guesses about what you want things to be set to,
+and then writes a package.json file with the options you've selected.
+
+If you already have a package.json file, it'll read that first, and default to
+the options in there.
+
+It is strictly additive, so it does not delete options from your package.json
+without a really good reason to do so.
+
+Since this function expects to be run on the command-line, it doesn't work very
+well programmatically. The best option is to roll your own, and since
+JavaScript makes it stupid simple to output formatted JSON, that is the
+preferred method. If you're sure you want to handle command-line prompting,
+then go ahead and use this programmatically.
+
+## SEE ALSO
+
+npm-json(1)
diff --git a/deps/npm/doc/api/install.md b/deps/npm/doc/api/install.md
new file mode 100644
index 0000000000..12f665a76c
--- /dev/null
+++ b/deps/npm/doc/api/install.md
@@ -0,0 +1,19 @@
+npm-install(3) -- install a package programmatically
+====================================================
+
+## SYNOPSIS
+
+ npm.commands.install([where,] packages, callback)
+
+## DESCRIPTION
+
+This acts much the same ways as installing on the command-line.
+
+The 'where' parameter is optional and only used internally, and it specifies
+where the packages should be installed to.
+
+The 'packages' parameter is an array of strings. Each element in the array is
+the name of a package to be installed.
+
+Finally, 'callback' is a function that will be called when all packages have been
+installed or when an error has been encountered.
diff --git a/deps/npm/doc/api/link.md b/deps/npm/doc/api/link.md
new file mode 100644
index 0000000000..ad8cefcab3
--- /dev/null
+++ b/deps/npm/doc/api/link.md
@@ -0,0 +1,33 @@
+npm-link(3) -- Symlink a package folder
+=======================================
+
+## SYNOPSIS
+
+ npm.command.link(callback)
+ npm.command.link(packages, callback)
+
+## DESCRIPTION
+
+Package linking is a two-step process.
+
+Without parameters, link will create a globally-installed
+symbolic link from `prefix/package-name` to the current folder.
+
+With parameters, link will create a symlink from the local `node_modules`
+folder to the global symlink.
+
+When creating tarballs for `npm publish`, the linked packages are
+"snapshotted" to their current state by resolving the symbolic links.
+
+This is
+handy for installing your own stuff, so that you can work on it and test it
+iteratively without having to continually rebuild.
+
+For example:
+
+ npm.commands.link(cb) # creates global link from the cwd
+ # (say redis package)
+ npm.commands.link('redis', cb) # link-install the package
+
+Now, any changes to the redis package will be reflected in
+the package in the current working directory
diff --git a/deps/npm/doc/api/list.md b/deps/npm/doc/api/list.md
new file mode 120000
index 0000000000..5b3debb8f1
--- /dev/null
+++ b/deps/npm/doc/api/list.md
@@ -0,0 +1 @@
+ls.md \ No newline at end of file
diff --git a/deps/npm/doc/api/ln.md b/deps/npm/doc/api/ln.md
new file mode 120000
index 0000000000..243f994145
--- /dev/null
+++ b/deps/npm/doc/api/ln.md
@@ -0,0 +1 @@
+link.md \ No newline at end of file
diff --git a/deps/npm/doc/api/load.md b/deps/npm/doc/api/load.md
new file mode 100644
index 0000000000..a95a6b295d
--- /dev/null
+++ b/deps/npm/doc/api/load.md
@@ -0,0 +1,26 @@
+npm-load(3) -- Load config settings
+===================================
+
+## SYNOPSIS
+
+ npm.load(conf, cb)
+
+## DESCRIPTION
+
+npm.load() must be called before any other function call. Both parameters are
+optional, but the second is recommended.
+
+The first parameter is an object hash of command-line config params, and the
+second parameter is a callback that will be called when npm is loaded and
+ready to serve.
+
+The first parameter should follow a similar structure as the package.json
+config object.
+
+For example, to emulate the --dev flag, pass an object that looks like this:
+
+ {
+ "dev": true
+ }
+
+For a list of all the available command-line configs, see `npm help config`
diff --git a/deps/npm/doc/api/ls.md b/deps/npm/doc/api/ls.md
new file mode 100644
index 0000000000..a6c0a13821
--- /dev/null
+++ b/deps/npm/doc/api/ls.md
@@ -0,0 +1,50 @@
+npm-ls(3) -- List installed packages
+======================================
+
+## SYNOPSIS
+
+ npm.commands.ls(args, [silent,] callback)
+
+## DESCRIPTION
+
+This command will print to stdout all the versions of packages that are
+installed, as well as their dependencies, in a tree-structure. It will also
+return that data using the callback.
+
+This command does not take any arguments, but args must be defined.
+Beyond that, if any arguments are passed in, npm will politely warn that it
+does not take positional arguments, though you may set config flags
+like with any other command, such as `global` to list global packages.
+
+It will print out extraneous, missing, and invalid packages.
+
+If the silent parameter is set to true, nothing will be output to the screen,
+but the data will still be returned.
+
+## CONFIGURATION
+
+### long
+
+* Default: false
+* Type: Boolean
+
+Show extended information.
+
+### parseable
+
+* Default: false
+* Type: Boolean
+
+Show parseable output instead of tree view.
+
+### global
+
+* Default: false
+* Type: Boolean
+
+List packages in the global install prefix instead of in the current
+project.
+
+Note, if parseable is set or long isn't set, then duplicates will be trimmed.
+This means that if a submodule has the same dependency as a parent module, then the
+dependency will only be output once.
diff --git a/deps/npm/doc/api/npm.md b/deps/npm/doc/api/npm.md
new file mode 100644
index 0000000000..a2f034c4b7
--- /dev/null
+++ b/deps/npm/doc/api/npm.md
@@ -0,0 +1,115 @@
+npm(3) -- node package manager
+==============================
+
+## SYNOPSIS
+
+ var npm = require("npm")
+ npm.load(configObject, function (er, npm) {
+ // use the npm object, now that it's loaded.
+
+ npm.config.set(key, val)
+ val = npm.config.get(key)
+
+ console.log("prefix = %s", npm.prefix)
+
+ npm.commands.install(["package"], cb)
+ })
+
+## VERSION
+
+@VERSION@
+
+## DESCRIPTION
+
+This is the API documentation for npm.
+To find documentation of the command line
+client, see `npm(1)`.
+
+Prior to using npm's commands,
+`npm.load()` must be called with an object hash of
+top-level configs. In the npm command line client,
+this set of configs is parsed from the command line options. Additional
+configuration params are loaded from two configuration files. See
+`npm-config(1)` for more information.
+
+After that, each of the functions are accessible in the
+commands object: `npm.commands.<cmd>`. See `npm-index(1)` for a list of
+all possible commands.
+
+All commands on the command object take an **array** of positional argument
+**strings**. The last argument to any function is a callback. Some
+commands take other optional arguments.
+
+Configs cannot currently be set on a per function basis, as each call to
+npm.config.set will change the value for *all* npm commands in that process.
+
+To find API documentation for a specific command, run the `npm apihelp`
+command.
+
+## METHODS AND PROPERTIES
+
+* `npm.load(configs, cb)`
+
+ Load the configuration params, and call the `cb` function once the
+ globalconfig and userconfig files have been loaded as well, or on
+ nextTick if they've already been loaded.
+
+* `npm.config`
+
+ An object for accessing npm configuration parameters.
+
+ * `npm.config.get(key)`
+ * `npm.config.set(key, val)`
+ * `npm.config.del(key)`
+
+* `npm.dir` or `npm.root`
+
+ The `node_modules` directory where npm will operate.
+
+* `npm.prefix`
+
+ The prefix where npm is operating. (Most often the current working
+ directory.)
+
+* `npm.cache`
+
+ The place where npm keeps JSON and tarballs it fetches from the
+ registry (or uploads to the registry).
+
+* `npm.tmp`
+
+ npm's temporary working directory.
+
+* `npm.deref`
+
+ Get the "real" name for a command that has either an alias or
+ abbreviation.
+
+## MAGIC
+
+For each of the methods in the `npm.commands` hash, a method is added to
+the npm object, which takes a set of positional string arguments rather
+than an array and a callback.
+
+If the last argument is a callback, then it will use the supplied
+callback. However, if no callback is provided, then it will print out
+the error or results.
+
+For example, this would work in a node repl:
+
+ > npm = require("npm")
+ > npm.load() // wait a sec...
+ > npm.install("dnode", "express")
+
+Note that that *won't* work in a node program, since the `install`
+method will get called before the configuration load is completed.
+
+## ABBREVS
+
+In order to support `npm ins foo` instead of `npm install foo`, the
+`npm.commands` object has a set of abbreviations as well as the full
+method names. Use the `npm.deref` method to find the real name.
+
+For example:
+
+ var cmd = npm.deref("unp") // cmd === "unpublish"
diff --git a/deps/npm/doc/api/outdated.md b/deps/npm/doc/api/outdated.md
new file mode 100644
index 0000000000..89f4cf6faa
--- /dev/null
+++ b/deps/npm/doc/api/outdated.md
@@ -0,0 +1,13 @@
+npm-outdated(3) -- Check for outdated packages
+==============================================
+
+## SYNOPSIS
+
+ npm.commands.outdated([packages,] callback)
+
+## DESCRIPTION
+
+This command will check the registry to see if the specified packages are
+currently outdated.
+
+If the 'packages' parameter is left out, npm will check all packages.
diff --git a/deps/npm/doc/api/owner.md b/deps/npm/doc/api/owner.md
new file mode 100644
index 0000000000..de203c072a
--- /dev/null
+++ b/deps/npm/doc/api/owner.md
@@ -0,0 +1,31 @@
+npm-owner(3) -- Manage package owners
+=====================================
+
+## SYNOPSIS
+
+ npm.commands.owner(args, callback)
+
+## DESCRIPTION
+
+The first element of the 'args' parameter defines what to do, and the subsequent
+elements depend on the action. Possible values for the action are (order of
+parameters are given in parenthesis):
+
+* ls (package):
+ List all the users who have access to modify a package and push new versions.
+ Handy when you need to know who to bug for help.
+* add (user, package):
+ Add a new user as a maintainer of a package. This user is enabled to modify
+ metadata, publish new versions, and add other owners.
+* rm (user, package):
+ Remove a user from the package owner list. This immediately revokes their
+ privileges.
+
+Note that there is only one level of access. Either you can modify a package,
+or you can't. Future versions may contain more fine-grained access levels, but
+that is not implemented at this time.
+
+## SEE ALSO
+
+* npm-publish(3)
+* npm-registry(1)
diff --git a/deps/npm/doc/api/pack.md b/deps/npm/doc/api/pack.md
new file mode 100644
index 0000000000..cb339c0c42
--- /dev/null
+++ b/deps/npm/doc/api/pack.md
@@ -0,0 +1,19 @@
+npm-pack(3) -- Create a tarball from a package
+==============================================
+
+## SYNOPSIS
+
+ npm.commands.pack([packages,] callback)
+
+## DESCRIPTION
+
+For anything that's installable (that is, a package folder, tarball,
+tarball url, name@tag, name@version, or name), this command will fetch
+it to the cache, and then copy the tarball to the current working
+directory as `<name>-<version>.tgz`, and then write the filenames out to
+stdout.
+
+If the same package is specified multiple times, then the file will be
+overwritten the second time.
+
+If no arguments are supplied, then npm packs the current package folder.
diff --git a/deps/npm/doc/api/prefix.md b/deps/npm/doc/api/prefix.md
new file mode 100644
index 0000000000..806dd4b6cb
--- /dev/null
+++ b/deps/npm/doc/api/prefix.md
@@ -0,0 +1,15 @@
+npm-prefix(3) -- Display prefix
+===============================
+
+## SYNOPSIS
+
+ npm.commands.prefix(args, callback)
+
+## DESCRIPTION
+
+Print the prefix to standard out.
+
+'args' is never used and callback is never called with data.
+'args' must be present or things will break.
+
+This function is not useful programmatically.
diff --git a/deps/npm/doc/api/prune.md b/deps/npm/doc/api/prune.md
new file mode 100644
index 0000000000..2a4f177485
--- /dev/null
+++ b/deps/npm/doc/api/prune.md
@@ -0,0 +1,17 @@
+npm-prune(3) -- Remove extraneous packages
+==========================================
+
+## SYNOPSIS
+
+ npm.commands.prune([packages,] callback)
+
+## DESCRIPTION
+
+This command removes "extraneous" packages.
+
+The first parameter is optional, and it specifies packages to be removed.
+
+If no packages are specified, then all packages will be checked.
+
+Extraneous packages are packages that are not listed on the parent
+package's dependencies list.
diff --git a/deps/npm/doc/api/publish.md b/deps/npm/doc/api/publish.md
new file mode 100644
index 0000000000..a743303f88
--- /dev/null
+++ b/deps/npm/doc/api/publish.md
@@ -0,0 +1,30 @@
+npm-publish(3) -- Publish a package
+===================================
+
+## SYNOPSIS
+
+ npm.commands.publish([packages,] callback)
+
+## DESCRIPTION
+
+Publishes a package to the registry so that it can be installed by name.
+Possible values in the 'packages' array are:
+
+* `<folder>`:
+ A folder containing a package.json file
+
+* `<tarball>`:
+ A url or file path to a gzipped tar archive containing a single folder
+ with a package.json file inside.
+
+If the package array is empty, npm will try to publish something in the
+current working directory.
+
+This command could fail if one of the packages specified already exists in
+the registry. Overwrites when the "force" environment variable is set.
+
+## SEE ALSO
+
+* npm-registry(1)
+* npm-adduser(1)
+* npm-owner(3)
diff --git a/deps/npm/doc/api/rebuild.md b/deps/npm/doc/api/rebuild.md
new file mode 100644
index 0000000000..8b8989806a
--- /dev/null
+++ b/deps/npm/doc/api/rebuild.md
@@ -0,0 +1,16 @@
+npm-rebuild(3) -- Rebuild a package
+===================================
+
+## SYNOPSIS
+
+ npm.commands.rebuild([packages,] callback)
+
+## DESCRIPTION
+
+This command runs the `npm build` command on each of the matched packages. This is useful
+when you install a new version of node, and must recompile all your C++ addons with
+the new binary. If no 'packages' parameter is specified, every package will be rebuilt.
+
+## CONFIGURATION
+
+See `npm help build`
diff --git a/deps/npm/doc/api/restart.md b/deps/npm/doc/api/restart.md
new file mode 100644
index 0000000000..c40704438e
--- /dev/null
+++ b/deps/npm/doc/api/restart.md
@@ -0,0 +1,22 @@
+npm-restart(3) -- Restart a package
+===================================
+
+## SYNOPSIS
+
+ npm.commands.restart(packages, callback)
+
+## DESCRIPTION
+
+This runs a package's "restart" script, if one was provided.
+Otherwise it runs package's "stop" script, if one was provided, and then
+the "start" script.
+
+If no version is specified, then it restarts the "active" version.
+
+npm can restart multiple packages. Just specify multiple packages
+in the `packages` parameter.
+
+## SEE ALSO
+
+* npm-start(3)
+* npm-stop(3)
diff --git a/deps/npm/doc/api/rm.md b/deps/npm/doc/api/rm.md
new file mode 120000
index 0000000000..32d3b511f9
--- /dev/null
+++ b/deps/npm/doc/api/rm.md
@@ -0,0 +1 @@
+uninstall.md \ No newline at end of file
diff --git a/deps/npm/doc/api/root.md b/deps/npm/doc/api/root.md
new file mode 100644
index 0000000000..1c3ab56402
--- /dev/null
+++ b/deps/npm/doc/api/root.md
@@ -0,0 +1,15 @@
+npm-root(3) -- Display npm root
+===============================
+
+## SYNOPSIS
+
+ npm.commands.root(args, callback)
+
+## DESCRIPTION
+
+Print the effective `node_modules` folder to standard out.
+
+'args' is never used and callback is never called with data.
+'args' must be present or things will break.
+
+This function is not useful programmatically.
diff --git a/deps/npm/doc/api/run-script.md b/deps/npm/doc/api/run-script.md
new file mode 100644
index 0000000000..f15900ecbc
--- /dev/null
+++ b/deps/npm/doc/api/run-script.md
@@ -0,0 +1,27 @@
+npm-run-script(3) -- Run arbitrary package scripts
+==================================================
+
+## SYNOPSIS
+
+ npm.commands.run-script(args, callback)
+
+## DESCRIPTION
+
+This runs an arbitrary command from a package's "scripts" object.
+
+It is used by the test, start, restart, and stop commands, but can be
+called directly, as well.
+
+The 'args' parameter is an array of strings. Behavior depends on the number
+of elements. If there is only one element, npm assumes that the element
+represents a command to be run on the local repository. If there is more than
+one element, then the first is assumed to be the package and the second is
+assumed to be the command to run. All other elements are ignored.
+
+## SEE ALSO
+
+* npm-scripts(1)
+* npm-test(3)
+* npm-start(3)
+* npm-restart(3)
+* npm-stop(3)
diff --git a/deps/npm/doc/api/search.md b/deps/npm/doc/api/search.md
new file mode 100644
index 0000000000..30651d76a4
--- /dev/null
+++ b/deps/npm/doc/api/search.md
@@ -0,0 +1,35 @@
+npm-search(3) -- Search for packages
+====================================
+
+## SYNOPSIS
+
+ npm.commands.search(searchTerms, [silent,] [staleness,] callback)
+
+## DESCRIPTION
+
+Search the registry for packages matching the search terms. The available parameters are:
+
+* searchTerms:
+ Array of search terms. These terms are case-insensitive.
+* silent:
+ If true, npm will not log anything to the console.
+* staleness:
+ This is the threshold for stale packages. "Fresh" packages are not refreshed
+ from the registry. This value is measured in seconds.
+* callback:
+ Returns an object where each key is the name of a package, and the value
+ is information about that package along with a 'words' property, which is
+ a space-delimited string of all of the interesting words in that package.
+ The only properties included are those that are searched, which generally include:
+
+ * name
+ * description
+ * maintainers
+ * url
+ * keywords
+
+A search on the registry excludes any result that does not match all of the
+search terms. It also removes any items from the results that contain an
+excluded term (the "searchexclude" config). The search is case insensitive
+and doesn't try to read your mind (it doesn't do any verb tense matching or the
+like).
diff --git a/deps/npm/doc/api/set.md b/deps/npm/doc/api/set.md
new file mode 120000
index 0000000000..3dc8737366
--- /dev/null
+++ b/deps/npm/doc/api/set.md
@@ -0,0 +1 @@
+config.md \ No newline at end of file
diff --git a/deps/npm/doc/api/start.md b/deps/npm/doc/api/start.md
new file mode 100644
index 0000000000..74491146aa
--- /dev/null
+++ b/deps/npm/doc/api/start.md
@@ -0,0 +1,13 @@
+npm-start(3) -- Start a package
+===============================
+
+## SYNOPSIS
+
+ npm.commands.start(packages, callback)
+
+## DESCRIPTION
+
+This runs a package's "start" script, if one was provided.
+
+npm can start multiple packages. Just specify multiple packages
+in the `packages` parameter.
diff --git a/deps/npm/doc/api/stop.md b/deps/npm/doc/api/stop.md
new file mode 100644
index 0000000000..0f8333d351
--- /dev/null
+++ b/deps/npm/doc/api/stop.md
@@ -0,0 +1,13 @@
+npm-stop(3) -- Stop a package
+=============================
+
+## SYNOPSIS
+
+ npm.commands.stop(packages, callback)
+
+## DESCRIPTION
+
+This runs a package's "stop" script, if one was provided.
+
+npm can run stop on multiple packages. Just specify multiple packages
+in the `packages` parameter.
diff --git a/deps/npm/doc/api/submodule.md b/deps/npm/doc/api/submodule.md
new file mode 100644
index 0000000000..2d8bafaa31
--- /dev/null
+++ b/deps/npm/doc/api/submodule.md
@@ -0,0 +1,28 @@
+npm-submodule(3) -- Add a package as a git submodule
+====================================================
+
+## SYNOPSIS
+
+ npm.commands.submodule(packages, callback)
+
+## DESCRIPTION
+
+For each package specified, npm will check if it has a git repository url
+in its package.json description then add it as a git submodule at
+`node_modules/<pkg name>`.
+
+This is a convenience only. From then on, it's up to you to manage
+updates by using the appropriate git commands. npm will stubbornly
+refuse to update, modify, or remove anything with a `.git` subfolder
+in it.
+
+This command also does not install missing dependencies, if the package
+does not include them in its git repository. If `npm ls` reports that
+things are missing, you can either install, link, or submodule them yourself,
+or you can do `npm explore <pkgname> -- npm install` to install the
+dependencies into the submodule folder.
+
+## SEE ALSO
+
+* npm help json
+* git help submodule
diff --git a/deps/npm/doc/api/tag.md b/deps/npm/doc/api/tag.md
new file mode 100644
index 0000000000..b5a3d7faa6
--- /dev/null
+++ b/deps/npm/doc/api/tag.md
@@ -0,0 +1,23 @@
+npm-tag(3) -- Tag a published version
+=====================================
+
+## SYNOPSIS
+
+ npm.commands.tag(package@version, tag, callback)
+
+## DESCRIPTION
+
+Tags the specified version of the package with the specified tag, or the
+`--tag` config if not specified.
+
+The 'package@version' is an array of strings, but only the first two elements are
+currently used.
+
+The first element must be in the form package@version, where package
+is the package name and version is the version number (much like installing a
+specific version).
+
+The second element is the name of the tag to tag this version with. If this
+parameter is missing or falsey (empty), the default from the config will be
+used. For more information about how to set this config, check
+`man 3 npm-config` for programmatic usage or `man npm-config` for cli usage.
diff --git a/deps/npm/doc/api/test.md b/deps/npm/doc/api/test.md
new file mode 100644
index 0000000000..bc48dcc35f
--- /dev/null
+++ b/deps/npm/doc/api/test.md
@@ -0,0 +1,16 @@
+npm-test(3) -- Test a package
+=============================
+
+## SYNOPSIS
+
+ npm.commands.test(packages, callback)
+
+## DESCRIPTION
+
+This runs a package's "test" script, if one was provided.
+
+To run tests as a condition of installation, set the `npat` config to
+true.
+
+npm can run tests on multiple packages. Just specify multiple packages
+in the `packages` parameter.
diff --git a/deps/npm/doc/api/uninstall.md b/deps/npm/doc/api/uninstall.md
new file mode 100644
index 0000000000..4505295b3b
--- /dev/null
+++ b/deps/npm/doc/api/uninstall.md
@@ -0,0 +1,16 @@
+npm-uninstall(3) -- uninstall a package programmatically
+========================================================
+
+## SYNOPSIS
+
+ npm.commands.uninstall(packages, callback)
+
+## DESCRIPTION
+
+This acts much the same ways as uninstalling on the command-line.
+
+The 'packages' parameter is an array of strings. Each element in the array is
+the name of a package to be uninstalled.
+
+Finally, 'callback' is a function that will be called when all packages have been
+uninstalled or when an error has been encountered.
diff --git a/deps/npm/doc/api/unpublish.md b/deps/npm/doc/api/unpublish.md
new file mode 100644
index 0000000000..6cbc5c1f37
--- /dev/null
+++ b/deps/npm/doc/api/unpublish.md
@@ -0,0 +1,20 @@
+npm-unpublish(3) -- Remove a package from the registry
+======================================================
+
+## SYNOPSIS
+
+ npm.commands.unpublish(package, callback)
+
+## DESCRIPTION
+
+This removes a package version from the registry, deleting its
+entry and removing the tarball.
+
+The package parameter must be defined.
+
+Only the first element in the package parameter is used. If there is no first
+element, then npm assumes that the package at the current working directory
+is what is meant.
+
+If no version is specified, or if all versions are removed then
+the root package entry is removed from the registry entirely.
diff --git a/deps/npm/doc/api/update.md b/deps/npm/doc/api/update.md
new file mode 100644
index 0000000000..bf02fd3c84
--- /dev/null
+++ b/deps/npm/doc/api/update.md
@@ -0,0 +1,11 @@
+npm-update(3) -- Update a package
+=================================
+
+## SYNOPSIS
+ npm.commands.update(packages, callback)
+
+## DESCRIPTION
+
+Updates a package, upgrading it to the latest version. It also installs any missing packages.
+
+The 'packages' argument is an array of packages to update. The 'callback' parameter will be called when done or when an error occurs.
diff --git a/deps/npm/doc/api/version.md b/deps/npm/doc/api/version.md
new file mode 100644
index 0000000000..bd40102c45
--- /dev/null
+++ b/deps/npm/doc/api/version.md
@@ -0,0 +1,18 @@
+npm-version(3) -- Bump a package version
+========================================
+
+## SYNOPSIS
+
+ npm.commands.version(newversion, callback)
+
+## DESCRIPTION
+
+Run this in a package directory to bump the version and write the new
+data back to the package.json file.
+
+If run in a git repo, it will also create a version commit and tag, and
+fail if the repo is not clean.
+
+Like all other commands, this function takes a string array as its first
+parameter. The difference, however, is this function will fail if it does
+not have exactly one element. The only element should be a version number.
diff --git a/deps/npm/doc/api/view.md b/deps/npm/doc/api/view.md
new file mode 100644
index 0000000000..fd0076c967
--- /dev/null
+++ b/deps/npm/doc/api/view.md
@@ -0,0 +1,93 @@
+npm-view(3) -- View registry info
+=================================
+
+## SYNOPSIS
+
+ npm.commands.view(args, [silent,] callback)
+
+## DESCRIPTION
+
+This command shows data about a package and prints it to the stream
+referenced by the `outfd` config, which defaults to stdout.
+
+The "args" parameter is an ordered list that closely resembles the command-line
+usage. The elements should be ordered such that the first element is
+the package and version (package@version). The version is optional. After that,
+the rest of the parameters are fields with optional subfields ("field.subfield")
+which can be used to get only the information desired from the registry.
+
+The callback will be passed all of the data returned by the query.
+
+For example, to get the package registry entry for the `connect` package,
+you can do this:
+
+ npm.commands.view(["connect"], callback)
+
+If no version is specified, "latest" is assumed.
+
+Field names can be specified after the package descriptor.
+For example, to show the dependencies of the `ronn` package at version
+0.3.5, you could do the following:
+
+ npm.commands.view(["ronn@0.3.5", "dependencies"], callback)
+
+You can view child fields by separating them with a period.
+To view the git repository URL for the latest version of npm, you could
+do this:
+
+ npm.commands.view(["npm", "repository.url"], callback)
+
+For fields that are arrays, requesting a non-numeric field will return
+all of the values from the objects in the list. For example, to get all
+the contributor names for the "express" project, you can do this:
+
+ npm.commands.view(["express", "contributors.email"], callback)
+
+You may also use numeric indices in square braces to specifically select
+an item in an array field. To just get the email address of the first
+contributor in the list, you can do this:
+
+ npm.commands.view(["express", "contributors[0].email"], callback)
+
+Multiple fields may be specified, and will be printed one after another.
+For example, to get all the contributor names and email addresses, you
+can do this:
+
+ npm.commands.view(["express", "contributors.name", "contributors.email"], callback)
+
+"Person" fields are shown as a string if they would be shown as an
+object. So, for example, this will show the list of npm contributors in
+the shortened string format. (See `npm help json` for more on this.)
+
+ npm.commands.view(["npm", "contributors"], callback)
+
+If a version range is provided, then data will be printed for every
+matching version of the package. This will show which version of jsdom
+was required by each matching version of yui3:
+
+ npm.commands.view(["yui3@'>0.5.4'", "dependencies.jsdom"], callback)
+
+## OUTPUT
+
+If only a single string field for a single version is output, then it
+will not be colorized or quoted, so as to enable piping the output to
+another command.
+
+If the version range matches multiple versions, then each printed value
+will be prefixed with the version it applies to.
+
+If multiple fields are requested, then each of them is prefixed with
+the field name.
+
+Console output can be disabled by setting the 'silent' parameter to true.
+
+## RETURN VALUE
+
+The data returned will be an object in this format:
+
+ { <version>:
+ { <field>: <value>
+ , ... }
+ , ... }
+
+corresponding to the list of fields selected.
diff --git a/deps/npm/doc/api/whoami.md b/deps/npm/doc/api/whoami.md
new file mode 100644
index 0000000000..598a1ab1a3
--- /dev/null
+++ b/deps/npm/doc/api/whoami.md
@@ -0,0 +1,15 @@
+npm-whoami(3) -- Display npm username
+=====================================
+
+## SYNOPSIS
+
+ npm.commands.whoami(args, callback)
+
+## DESCRIPTION
+
+Print the `username` config to standard output.
+
+'args' is never used and callback is never called with data.
+'args' must be present or things will break.
+
+This function is not useful programmatically.
diff --git a/deps/npm/doc/cli/adduser.md b/deps/npm/doc/cli/adduser.md
new file mode 100644
index 0000000000..51aa6f6a3d
--- /dev/null
+++ b/deps/npm/doc/cli/adduser.md
@@ -0,0 +1,36 @@
+npm-adduser(1) -- Add a registry user account
+=============================================
+
+## SYNOPSIS
+
+ npm adduser
+
+## DESCRIPTION
+
+Create or verify a user named `<username>` in the npm registry, and
+save the credentials to the `.npmrc` file.
+
+The username, password, and email are read in from prompts.
+
+You may use this command to change your email address, but not username
+or password.
+
+To reset your password, go to <http://admin.npmjs.org/>
+
+You may use this command multiple times with the same user account to
+authorize on a new machine.
+
+## CONFIGURATION
+
+### registry
+
+Default: http://registry.npmjs.org/
+
+The base URL of the npm package registry.
+
+## SEE ALSO
+
+* npm-registry(1)
+* npm-config(1)
+* npm-owner(1)
+* npm-whoami(1)
diff --git a/deps/npm/doc/cli/author.md b/deps/npm/doc/cli/author.md
new file mode 120000
index 0000000000..b7a53cb66b
--- /dev/null
+++ b/deps/npm/doc/cli/author.md
@@ -0,0 +1 @@
+owner.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/bin.md b/deps/npm/doc/cli/bin.md
new file mode 100644
index 0000000000..2c2e7c4772
--- /dev/null
+++ b/deps/npm/doc/cli/bin.md
@@ -0,0 +1,17 @@
+npm-bin(1) -- Display npm bin folder
+====================================
+
+## SYNOPSIS
+
+ npm bin
+
+## DESCRIPTION
+
+Print the folder where npm will install executables.
+
+## SEE ALSO
+
+* npm-prefix(1)
+* npm-root(1)
+* npm-folders(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/bugs.md b/deps/npm/doc/cli/bugs.md
new file mode 100644
index 0000000000..2e57cc891b
--- /dev/null
+++ b/deps/npm/doc/cli/bugs.md
@@ -0,0 +1,38 @@
+npm-bugs(1) -- Bugs for a package in a web browser maybe
+========================================================
+
+## SYNOPSIS
+
+ npm bugs <pkgname>
+
+## DESCRIPTION
+
+This command tries to guess at the likely location of a package's
+bug tracker URL, and then tries to open it using the `--browser`
+config param.
+
+## CONFIGURATION
+
+### browser
+
+* Default: OS X: `"open"`, others: `"google-chrome"`
+* Type: String
+
+The browser that is called by the `npm bugs` command to open websites.
+
+### registry
+
+* Default: https://registry.npmjs.org/
+* Type: url
+
+The base URL of the npm package registry.
+
+
+## SEE ALSO
+
+* npm-docs(1)
+* npm-view(1)
+* npm-publish(1)
+* npm-registry(1)
+* npm-config(1)
+* npm-json(1)
diff --git a/deps/npm/doc/cli/build.md b/deps/npm/doc/cli/build.md
new file mode 100644
index 0000000000..978f4a6d62
--- /dev/null
+++ b/deps/npm/doc/cli/build.md
@@ -0,0 +1,22 @@
+npm-build(1) -- Build a package
+===============================
+
+## SYNOPSIS
+
+ npm build <package-folder>
+
+* `<package-folder>`:
+ A folder containing a `package.json` file in its root.
+
+## DESCRIPTION
+
+This is the plumbing command called by `npm link` and `npm install`.
+
+It should generally not be called directly.
+
+## SEE ALSO
+
+* npm-install(1)
+* npm-link(1)
+* npm-scripts(1)
+* npm-json(1)
diff --git a/deps/npm/doc/cli/bundle.md b/deps/npm/doc/cli/bundle.md
new file mode 100644
index 0000000000..69b3d83e45
--- /dev/null
+++ b/deps/npm/doc/cli/bundle.md
@@ -0,0 +1,14 @@
+npm-bundle(1) -- REMOVED
+========================
+
+## DESCRIPTION
+
+The `npm bundle` command has been removed in 1.0, for the simple reason
+that it is no longer necessary, as the default behavior is now to
+install packages into the local space.
+
+Just use `npm install` now to do what `npm bundle` used to do.
+
+## SEE ALSO
+
+* npm-install(1)
diff --git a/deps/npm/doc/cli/cache.md b/deps/npm/doc/cli/cache.md
new file mode 100644
index 0000000000..1fa128ad44
--- /dev/null
+++ b/deps/npm/doc/cli/cache.md
@@ -0,0 +1,70 @@
+npm-cache(1) -- Manipulates packages cache
+==========================================
+
+## SYNOPSIS
+
+ npm cache add <tarball file>
+ npm cache add <folder>
+ npm cache add <tarball url>
+ npm cache add <name>@<version>
+
+ npm cache ls [<path>]
+
+ npm cache clean [<path>]
+
+## DESCRIPTION
+
+Used to add, list, or clear the npm cache folder.
+
+* add:
+ Add the specified package to the local cache. This command is primarily
+ intended to be used internally by npm, but it can provide a way to
+ add data to the local installation cache explicitly.
+
+* ls:
+ Show the data in the cache. Argument is a path to show in the cache
+ folder. Works a bit like the `find` program, but limited by the
+ `depth` config.
+
+* clean:
+ Delete data out of the cache folder. If an argument is provided, then
+ it specifies a subpath to delete. If no argument is provided, then
+ the entire cache is cleared.
+
+## DETAILS
+
+npm stores cache data in `$HOME/.npm`. For each package that is added
+to the cache, three pieces of information are stored in
+`{cache}/{name}/{version}`:
+
+* .../package/:
+ A folder containing the package contents as they appear in the tarball.
+* .../package.json:
+ The package.json file, as npm sees it, with overlays applied and a _id attribute.
+* .../package.tgz:
+ The tarball for that version.
+
+Additionally, whenever a registry request is made, a `.cache.json` file
+is placed at the corresponding URI, to store the ETag and the requested
+data.
+
+Commands that make non-essential registry requests (such as `search` and
+`view`, or the completion scripts) generally specify a minimum timeout.
+If the `.cache.json` file is younger than the specified timeout, then
+they do not make an HTTP request to the registry.
+
+## CONFIGURATION
+
+### cache
+
+Default: `$HOME/.npm` on Posix, or `$HOME/npm-cache` on Windows.
+
+The root cache folder.
+
+## SEE ALSO
+
+* npm-folders(1)
+* npm-config(1)
+* npm-install(1)
+* npm-publish(1)
+* npm-pack(1)
diff --git a/deps/npm/doc/cli/changelog.md b/deps/npm/doc/cli/changelog.md
new file mode 100644
index 0000000000..0115405ca0
--- /dev/null
+++ b/deps/npm/doc/cli/changelog.md
@@ -0,0 +1,36 @@
+npm-changelog(1) -- Changes
+===========================
+
+## HISTORY
+
+### 1.0
+* Greatly simplified folder structure
+* Install locally (bundle by default)
+* Drastic rearchitecture
+
+### 0.3
+* More correct permission/uid handling when running as root
+* Require node 0.4.0
+* Reduce featureset
+* Packages without "main" modules don't export modules
+* Remove support for invalid JSON (since node doesn't support it)
+
+### 0.2
+* First allegedly "stable" release
+* Most functionality implemented
+* Used shim files and `name@version` symlinks
+* Feature explosion
+* Kind of a mess
+
+### 0.1
+* push to beta, and announce
+* Solaris and Cygwin support
+
+### 0.0
+* Lots of sketches and false starts; abandoned a few times
+* Core functionality established
+
+## SEE ALSO
+
+* npm(1)
+* npm-faq(1)
diff --git a/deps/npm/doc/cli/coding-style.md b/deps/npm/doc/cli/coding-style.md
new file mode 100644
index 0000000000..f0640c85cd
--- /dev/null
+++ b/deps/npm/doc/cli/coding-style.md
@@ -0,0 +1,190 @@
+npm-coding-style(1) -- npm's "funny" coding style
+=================================================
+
+## DESCRIPTION
+
+npm's coding style is a bit unconventional. It is not different for
+difference's sake, but rather a carefully crafted style that is
+designed to reduce visual clutter and make bugs more apparent.
+
+If you want to contribute to npm (which is very encouraged), you should
+make your code conform to npm's style.
+
+## Line Length
+
+Keep lines shorter than 80 characters. It's better for lines to be
+too short than to be too long. Break up long lists, objects, and other
+statements onto multiple lines.
+
+## Indentation
+
+Two-spaces. Tabs are better, but they look like hell in web browsers
+(and on github), and node uses 2 spaces, so that's that.
+
+Configure your editor appropriately.
+
+## Curly braces
+
+Curly braces belong on the same line as the thing that necessitates them.
+
+Bad:
+
+ function ()
+ {
+
+Good:
+
+ function () {
+
+If a block needs to wrap to the next line, use a curly brace. Don't
+use it if it doesn't.
+
+Bad:
+
+ if (foo) { bar() }
+ while (foo)
+ bar()
+
+Good:
+
+ if (foo) bar()
+ while (foo) {
+ bar()
+ }
+
+## Semicolons
+
+Don't use them except in four situations:
+
+* `for (;;)` loops. They're actually required.
+* null loops like: `while (something) ;` (But you'd better have a good
+ reason for doing that.)
+* case "foo": doSomething(); break
+* In front of a leading ( or [ at the start of the line.
+ This prevents the expression from being interpreted
+ as a function call or property access, respectively.
+
+Some examples of good semicolon usage:
+
+ ;(x || y).doSomething()
+ ;[a, b, c].forEach(doSomething)
+ for (var i = 0; i < 10; i ++) {
+ switch (state) {
+ case "begin": start(); continue
+ case "end": finish(); break
+ default: throw new Error("unknown state")
+ }
+ end()
+ }
+
+Note that starting lines with `-` and `+` also should be prefixed
+with a semicolon, but this is much less common.
+
+## Comma First
+
+If there is a list of things separated by commas, and it wraps
+across multiple lines, put the comma at the start of the next
+line, directly below the token that starts the list. Put the
+final token in the list on a line by itself. For example:
+
+ var magicWords = [ "abracadabra"
+ , "gesundheit"
+ , "ventrilo"
+ ]
+ , spells = { "fireball" : function () { setOnFire() }
+ , "water" : function () { putOut() }
+ }
+ , a = 1
+ , b = "abc"
+ , etc
+ , somethingElse
+
+## Whitespace
+
+Put a single space in front of ( for anything other than a function call.
+Also use a single space wherever it makes things more readable.
+
+Don't leave trailing whitespace at the end of lines. Don't indent empty
+lines. Don't use more spaces than are helpful.
+
+## Functions
+
+Use named functions. They make stack traces a lot easier to read.
+
+## Callbacks, Sync/async Style
+
+Use the asynchronous/non-blocking versions of things as much as possible.
+It might make more sense for npm to use the synchronous fs APIs, but this
+way, the fs and http and child process stuff all uses the same callback-passing
+methodology.
+
+The callback should always be the last argument in the list. Its first
+argument is the Error or null.
+
+Be very careful never to ever ever throw anything. It's worse than useless.
+Just send the error message back as the first argument to the callback.
+
+## Errors
+
+Always create a new Error object with your message. Don't just return a
+string message to the callback. Stack traces are handy.
+
+Use the `require("./utils/log").er` function. It takes a callback and an
+error message, and returns an object that will report the message in the
+event of a failure. It's quite handy.
+
+ function myThing (args, cb) {
+ getData(args, function (er, data) {
+ if (er) return log.er(cb, "Couldn't get data")(er)
+ doSomethingElse(data, cb)
+ })
+ }
+ function justHasToWork (cb) {
+ doSomething(log.er(cb, "the doSomething failed."))
+ }
+
+## Logging
+
+Please clean up logs when they are no longer helpful. In particular,
+logging the same object over and over again is not helpful. Logs should
+report what's happening so that it's easier to track down where a fault
+occurs.
+
+Use appropriate log levels. The default log() function logs at the
+"info" level. See `npm-config(1)` and search for "loglevel".
+
+## Case, naming, etc.
+
+Use lowerCamelCase for multiword identifiers when they refer to objects,
+functions, methods, members, or anything not specified in this section.
+
+Use UpperCamelCase for class names (things that you'd pass to "new").
+
+Use all-lower-hyphen-css-case for multiword filenames and config keys.
+
+Use named functions. They make stack traces easier to follow.
+
+Use CAPS_SNAKE_CASE for constants, things that should never change
+and are rarely used.
+
+Use a single uppercase letter for function names where the function
+would normally be anonymous, but needs to call itself recursively. It
+makes it clear that it's a "throwaway" function.
+
+## null, undefined, false, 0
+
+Boolean variables and functions should always be either `true` or
+`false`. Don't set it to 0 unless it's supposed to be a number.
+
+When something is intentionally missing or removed, set it to `null`.
+
+Don't set things to `undefined`. Reserve that value to mean "not yet
+set to anything."
+
+Boolean objects are verboten.
+
+## SEE ALSO
+
+* npm-developers(1)
+* npm-faq(1)
+* npm(1)
diff --git a/deps/npm/doc/cli/completion.md b/deps/npm/doc/cli/completion.md
new file mode 100644
index 0000000000..48bc50fd87
--- /dev/null
+++ b/deps/npm/doc/cli/completion.md
@@ -0,0 +1,29 @@
+npm-completion(1) -- Tab Completion for npm
+===========================================
+
+## SYNOPSIS
+
+ . <(npm completion)
+
+## DESCRIPTION
+
+Enables tab-completion in all npm commands.
+
+The synopsis above
+loads the completions into your current shell. Adding it to
+your ~/.bashrc or ~/.zshrc will make the completions available
+everywhere.
+
+You may of course also pipe the output of npm completion to a file
+such as `/usr/local/etc/bash_completion.d/npm` if you have a system
+that will read that file for you.
+
+When `COMP_CWORD`, `COMP_LINE`, and `COMP_POINT` are defined in the
+environment, `npm completion` acts in "plumbing mode", and outputs
+completions based on the arguments.
+
+## SEE ALSO
+
+* npm-developers(1)
+* npm-faq(1)
+* npm(1)
diff --git a/deps/npm/doc/cli/config.md b/deps/npm/doc/cli/config.md
new file mode 100644
index 0000000000..1ede36292e
--- /dev/null
+++ b/deps/npm/doc/cli/config.md
@@ -0,0 +1,665 @@
+npm-config(1) -- Manage the npm configuration file
+==================================================
+
+## SYNOPSIS
+
+ npm config set <key> <value> [--global]
+ npm config get <key>
+ npm config delete <key>
+ npm config list
+ npm config edit
+ npm get <key>
+ npm set <key> <value> [--global]
+
+## DESCRIPTION
+
+npm gets its configuration values from 6 sources, in this priority:
+
+### Command Line Flags
+
+Putting `--foo bar` on the command line sets the
+`foo` configuration parameter to `"bar"`. A `--` argument tells the cli
+parser to stop reading flags. A `--flag` parameter that is at the *end* of
+the command will be given the value of `true`.
+
+### Environment Variables
+
+Any environment variables that start with `npm_config_` will be interpreted
+as a configuration parameter. For example, putting `npm_config_foo=bar` in
+your environment will set the `foo` configuration parameter to `bar`. Any
+environment configurations that are not given a value will be given the value
+of `true`. Config values are case-insensitive, so `NPM_CONFIG_FOO=bar` will
+work the same.
+
+### Per-user config file
+
+`$HOME/.npmrc` (or the `userconfig` param, if set above)
+
+This file is an ini-file formatted list of `key = value` parameters.
+
+### Global config file
+
+`$PREFIX/etc/npmrc` (or the `globalconfig` param, if set above):
+This file is an ini-file formatted list of `key = value` parameters
+
+### Built-in config file
+
+`path/to/npm/itself/npmrc`
+
+This is an unchangeable "builtin"
+configuration file that npm keeps consistent across updates. Set
+fields in here using the `./configure` script that comes with npm.
+This is primarily for distribution maintainers to override default
+configs in a standard and consistent manner.
+
+### Default Configs
+
+A set of configuration parameters that are internal to npm, and are
+defaults if nothing else is specified.
+
+## Sub-commands
+
+Config supports the following sub-commands:
+
+### set
+
+ npm config set key value
+
+Sets the config key to the value.
+
+If value is omitted, then it sets it to "true".
+
+### get
+
+ npm config get key
+
+Echo the config value to stdout.
+
+### list
+
+ npm config list
+
+Show all the config settings.
+
+### delete
+
+ npm config delete key
+
+Deletes the key from all configuration files.
+
+### edit
+
+ npm config edit
+
+Opens the config file in an editor. Use the `--global` flag to edit the
+global config.
+
+## Shorthands and Other CLI Niceties
+
+The following shorthands are parsed on the command-line:
+
+* `-v`: `--version`
+* `-h`, `-?`, `--help`, `-H`: `--usage`
+* `-s`, `--silent`: `--loglevel silent`
+* `-d`: `--loglevel info`
+* `-dd`, `--verbose`: `--loglevel verbose`
+* `-ddd`: `--loglevel silly`
+* `-g`: `--global`
+* `-l`: `--long`
+* `-m`: `--message`
+* `-p`, `--porcelain`: `--parseable`
+* `-reg`: `--registry`
+* `-v`: `--version`
+* `-f`: `--force`
+* `-l`: `--long`
+* `-desc`: `--description`
+* `-S`: `--save`
+* `-y`: `--yes`
+* `-n`: `--yes false`
+* `ll` and `la` commands: `ls --long`
+
+If the specified configuration param resolves unambiguously to a known
+configuration parameter, then it is expanded to that configuration
+parameter. For example:
+
+ npm ls --par
+ # same as:
+ npm ls --parseable
+
+If multiple single-character shorthands are strung together, and the
+resulting combination is unambiguously not some other configuration
+param, then it is expanded to its various component pieces. For
+example:
+
+ npm ls -gpld
+ # same as:
+ npm ls --global --parseable --long --loglevel info
+
+## Per-Package Config Settings
+
+When running scripts (see `npm-scripts(1)`)
+the package.json "config" keys are overwritten in the environment if
+there is a config param of `<name>[@<version>]:<key>`. For example, if
+the package.json has this:
+
+ { "name" : "foo"
+ , "config" : { "port" : "8080" }
+ , "scripts" : { "start" : "node server.js" } }
+
+and the server.js is this:
+
+ http.createServer(...).listen(process.env.npm_package_config_port)
+
+then the user could change the behavior by doing:
+
+ npm config set foo:port 80
+
+## Config Settings
+
+### always-auth
+
+* Default: false
+* Type: Boolean
+
+Force npm to always require authentication when accessing the registry,
+even for `GET` requests.
+
+### bin-publish
+
+* Default: false
+* Type: Boolean
+
+If set to true, then binary packages will be created on publish.
+
+This is the way to opt into the "bindist" behavior described below.
+
+### bindist
+
+* Default: `null` on unstable node versions, otherwise
+  `"<node version>-<platform>-<os release>"`
+* Type: String or `null`
+
+Experimental: on stable versions of node, binary distributions will be
+created with this tag. If a user then installs that package, and their
+`bindist` tag is found in the list of binary distributions, they will
+get that prebuilt version.
+
+Pre-built node packages have their preinstall, install, and postinstall
+scripts stripped (since they are run prior to publishing), and do not
+have their `build` directories automatically ignored.
+
+It's yet to be seen if this is a good idea.
+
+### browser
+
+* Default: OS X: `"open"`, others: `"google-chrome"`
+* Type: String
+
+The browser that is called by the `npm docs` command to open websites.
+
+### ca
+
+* Default: The npm CA certificate
+* Type: String or null
+
+The Certificate Authority signing certificate that is trusted for SSL
+connections to the registry.
+
+Set to `null` to only allow "known" registrars, or to a specific CA cert
+to trust only that specific signing authority.
+
+See also the `strict-ssl` config.
+
+### cache
+
+* Default: Windows: `~/npm-cache`, Posix: `~/.npm`
+* Type: path
+
+The location of npm's cache directory. See `npm-cache(1)`
+
+### color
+
+* Default: true on Posix, false on Windows
+* Type: Boolean or `"always"`
+
+If false, never shows colors. If `"always"` then always shows colors.
+If true, then only prints color codes for tty file descriptors.
+
+### depth
+
+* Default: Infinity
+* Type: Number
+
+The depth to go when recursing directories for `npm ls` and
+`npm cache ls`.
+
+### description
+
+* Default: true
+* Type: Boolean
+
+Show the description in `npm search`
+
+### dev
+
+* Default: false
+* Type: Boolean
+
+Install `dev-dependencies` along with packages.
+
+Note that `dev-dependencies` are also installed if the `npat` flag is
+set.
+
+### editor
+
+* Default: `EDITOR` environment variable if set, or `"vi"` on Posix,
+ or `"notepad"` on Windows.
+* Type: path
+
+The command to run for `npm edit` or `npm config edit`.
+
+### force
+
+* Default: false
+* Type: Boolean
+
+Makes various commands more forceful.
+
+* lifecycle script failure does not block progress.
+* publishing clobbers previously published versions.
+* skips cache when requesting from the registry.
+* prevents checks against clobbering non-npm files.
+
+### global
+
+* Default: false
+* Type: Boolean
+
+Operates in "global" mode, so that packages are installed into the
+`prefix` folder instead of the current working directory. See
+`npm-folders(1)` for more on the differences in behavior.
+
+* packages are installed into the `prefix/node_modules` folder, instead of the
+ current working directory.
+* bin files are linked to `prefix/bin`
+* man pages are linked to `prefix/share/man`
+
+### globalconfig
+
+* Default: {prefix}/etc/npmrc
+* Type: path
+
+The config file to read for global config options.
+
+### globalignorefile
+
+* Default: {prefix}/etc/npmignore
+* Type: path
+
+The config file to read for global ignore patterns to apply to all users
+and all projects.
+
+If not found, but there is a "gitignore" file in the
+same directory, then that will be used instead.
+
+### group
+
+* Default: GID of the current process
+* Type: String or Number
+
+The group to use when running package scripts in global mode as the root
+user.
+
+### https-proxy
+
+* Default: the `HTTPS_PROXY` or `https_proxy` or `HTTP_PROXY` or
+ `http_proxy` environment variables.
+* Type: url
+
+A proxy to use for outgoing https requests.
+
+### ignore
+
+* Default: ""
+* Type: string
+
+A white-space separated list of glob patterns of files to always exclude
+from packages when building tarballs.
+
+### init.version
+
+* Default: "0.0.0"
+* Type: semver
+
+The value `npm init` should use by default for the package version.
+
+### init.author.name
+
+* Default: ""
+* Type: String
+
+The value `npm init` should use by default for the package author's name.
+
+### init.author.email
+
+* Default: ""
+* Type: String
+
+The value `npm init` should use by default for the package author's email.
+
+### init.author.url
+
+* Default: ""
+* Type: String
+
+The value `npm init` should use by default for the package author's homepage.
+
+### link
+
+* Default: false
+* Type: Boolean
+
+If true, then local installs will link if there is a suitable globally
+installed package.
+
+Note that this means that local installs can cause things to be
+installed into the global space at the same time. The link is only done
+if one of the two conditions are met:
+
+* The package is not already installed globally, or
+* the globally installed version is identical to the version that is
+ being installed locally.
+
+### logfd
+
+* Default: stderr file descriptor
+* Type: Number or Stream
+
+The location to write log output.
+
+### loglevel
+
+* Default: "warn"
+* Type: String
+* Values: "silent", "win", "error", "warn", "info", "verbose", "silly"
+
+What level of logs to report. On failure, *all* logs are written to
+`npm-debug.log` in the current working directory.
+
+### logprefix
+
+* Default: true on Posix, false on Windows
+* Type: Boolean
+
+Whether or not to prefix log messages with "npm" and the log level. See
+also "color" and "loglevel".
+
+### long
+
+* Default: false
+* Type: Boolean
+
+Show extended information in `npm ls`
+
+### message
+
+* Default: "%s"
+* Type: String
+
+Commit message which is used by `npm version` when creating version commit.
+
+Any "%s" in the message will be replaced with the version number.
+
+### node-version
+
+* Default: process.version
+* Type: semver or false
+
+The node version to use when checking package's "engines" hash.
+
+### npat
+
+* Default: false
+* Type: Boolean
+
+Run tests on installation and report results to the
+`npaturl`.
+
+### npaturl
+
+* Default: Not yet implemented
+* Type: url
+
+The url to report npat test results.
+
+### onload-script
+
+* Default: false
+* Type: path
+
+A node module to `require()` when npm loads. Useful for programmatic
+usage.
+
+### outfd
+
+* Default: standard output file descriptor
+* Type: Number or Stream
+
+Where to write "normal" output. This has no effect on log output.
+
+### parseable
+
+* Default: false
+* Type: Boolean
+
+Output parseable results from commands that write to
+standard output.
+
+### prefix
+
+* Default: node's process.installPrefix
+* Type: path
+
+The location to install global items. If set on the command line, then
+it forces non-global commands to run in the specified folder.
+
+### production
+
+* Default: false
+* Type: Boolean
+
+Set to true to run in "production" mode.
+
+1. devDependencies are not installed at the topmost level when running
+ local `npm install` without any arguments.
+2. Set the NODE_ENV="production" for lifecycle scripts.
+
+### proxy
+
+* Default: `HTTP_PROXY` or `http_proxy` environment variable, or null
+* Type: url
+
+A proxy to use for outgoing http requests.
+
+### rebuild-bundle
+
+* Default: true
+* Type: Boolean
+
+Rebuild bundled dependencies after installation.
+
+### registry
+
+* Default: https://registry.npmjs.org/
+* Type: url
+
+The base URL of the npm package registry.
+
+### rollback
+
+* Default: true
+* Type: Boolean
+
+Remove failed installs.
+
+### save
+
+* Default: false
+* Type: Boolean
+
+Save installed packages to a package.json file as dependencies.
+
+Only works if there is already a package.json file present.
+
+### searchopts
+
+* Default: ""
+* Type: String
+
+Space-separated options that are always passed to search.
+
+### searchexclude
+
+* Default: ""
+* Type: String
+
+Space-separated options that limit the results from search.
+
+### shell
+
+* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on
+ Windows
+* Type: path
+
+The shell to run for the `npm explore` command.
+
+### strict-ssl
+
+* Default: true
+* Type: Boolean
+
+Whether or not to do SSL key validation when making requests to the
+registry via https.
+
+See also the `ca` config.
+
+### tag
+
+* Default: latest
+* Type: String
+
+If you ask npm to install a package and don't tell it a specific version, then
+it will install the specified tag.
+
+Also the tag that is added to the package@version specified by the `npm
+tag` command, if no explicit tag is given.
+
+### tmp
+
+* Default: TMPDIR environment variable, or "/tmp"
+* Type: path
+
+Where to store temporary files and folders. All temp files are deleted
+on success, but left behind on failure for forensic purposes.
+
+### unicode
+
+* Default: true
+* Type: Boolean
+
+When set to true, npm uses unicode characters in the tree output. When
+false, it uses ascii characters to draw trees.
+
+### unsafe-perm
+
+* Default: false if running as root, true otherwise
+* Type: Boolean
+
+Set to true to suppress the UID/GID switching when running package
+scripts. If set explicitly to false, then installing as a non-root user
+will fail.
+
+### usage
+
+* Default: false
+* Type: Boolean
+
+Set to show short usage output (like the -H output)
+instead of complete help when doing `npm-help(1)`.
+
+### user
+
+* Default: "nobody"
+* Type: String or Number
+
+The UID to set to when running package scripts as root.
+
+### username
+
+* Default: null
+* Type: String
+
+The username on the npm registry. Set with `npm adduser`
+
+### userconfig
+
+* Default: ~/.npmrc
+* Type: path
+
+The location of user-level configuration settings.
+
+### userignorefile
+
+* Default: ~/.npmignore
+* Type: path
+
+The location of a user-level ignore file to apply to all packages.
+
+If not found, but there is a .gitignore file in the same directory, then
+that will be used instead.
+
+### umask
+
+* Default: 022
+* Type: Octal numeric string
+
+The "umask" value to use when setting the file creation mode on files
+and folders.
+
+Folders and executables are given a mode which is `0777` masked against
+this value. Other files are given a mode which is `0666` masked against
+this value. Thus, the defaults are `0755` and `0644` respectively.
+
+### version
+
+* Default: false
+* Type: boolean
+
+If true, output the npm version and exit successfully.
+
+Only relevant when specified explicitly on the command line.
+
+### viewer
+
+* Default: "man" on Posix, "browser" on Windows
+* Type: path
+
+The program to use to view help content.
+
+Set to `"browser"` to view html help content in the default web browser.
+
+### yes
+
+* Default: null
+* Type: Boolean or null
+
+If set to `null`, then prompt the user for responses in some
+circumstances.
+
+If set to `true`, then answer "yes" to any prompt. If set to `false`
+then answer "no" to any prompt.
+
+## SEE ALSO
+
+* npm-folders(1)
+* npm(1)
diff --git a/deps/npm/doc/cli/deprecate.md b/deps/npm/doc/cli/deprecate.md
new file mode 100644
index 0000000000..925337f21a
--- /dev/null
+++ b/deps/npm/doc/cli/deprecate.md
@@ -0,0 +1,24 @@
+npm-deprecate(1) -- Deprecate a version of a package
+====================================================
+
+## SYNOPSIS
+
+ npm deprecate <name>[@<version>] <message>
+
+## DESCRIPTION
+
+This command will update the npm registry entry for a package, providing
+a deprecation warning to all who attempt to install it.
+
+It works on version ranges as well as specific versions, so you can do
+something like this:
+
+ npm deprecate my-thing@"< 0.2.3" "critical bug fixed in v0.2.3"
+
+Note that you must be the package owner to deprecate something. See the
+`owner` and `adduser` help topics.
+
+## SEE ALSO
+
+* npm-publish(1)
+* npm-registry(1)
diff --git a/deps/npm/doc/cli/developers.md b/deps/npm/doc/cli/developers.md
new file mode 100644
index 0000000000..0f0f94c588
--- /dev/null
+++ b/deps/npm/doc/cli/developers.md
@@ -0,0 +1,172 @@
+npm-developers(1) -- Developer Guide
+====================================
+
+## DESCRIPTION
+
+So, you've decided to use npm to develop (and maybe publish/deploy)
+your project.
+
+Fantastic!
+
+There are a few things that you need to do above the simple steps
+that your users will do to install your program.
+
+## About These Documents
+
+These are man pages. If you install npm, you should be able to
+then do `man npm-thing` to get the documentation on a particular
+topic, or `npm help thing` to see the same information.
+
+## What is a `package`
+
+A package is:
+
+* a) a folder containing a program described by a package.json file
+* b) a gzipped tarball containing (a)
+* c) a url that resolves to (b)
+* d) a `<name>@<version>` that is published on the registry with (c)
+* e) a `<name>@<tag>` that points to (d)
+* f) a `<name>` that has a "latest" tag satisfying (e)
+
+Even if you never publish your package, you can still get a lot of
+benefits of using npm if you just want to write a node program (a), and
+perhaps if you also want to be able to easily install it elsewhere
+after packing it up into a tarball (b).
+
+## The package.json File
+
+You need to have a `package.json` file in the root of your project to do
+much of anything with npm. That is basically the whole interface.
+
+See `npm-json(1)` for details about what goes in that file. At the very
+least, you need:
+
+* name:
+ This should be a string that identifies your project. Please do not
+ use the name to specify that it runs on node, or is in JavaScript.
+ You can use the "engines" field to explicitly state the versions of
+ node (or whatever else) that your program requires, and it's pretty
+ well assumed that it's javascript.
+
+ It does not necessarily need to match your github repository name.
+
+ So, `node-foo` and `bar-js` are bad names. `foo` or `bar` are better.
+
+* version:
+ A semver-compatible version.
+
+* engines:
+ Specify the versions of node (or whatever else) that your program
+ runs on. The node API changes a lot, and there may be bugs or new
+ functionality that you depend on. Be explicit.
+
+* author:
+ Take some credit.
+
+* scripts:
+ If you have a special compilation or installation script, then you
+ should put it in the `scripts` hash. You should definitely have at
+ least a basic smoke-test command as the "scripts.test" field.
+ See npm-scripts(1).
+
+* main:
+ If you have a single module that serves as the entry point to your
+ program (like what the "foo" package gives you at require("foo")),
+ then you need to specify that in the "main" field.
+
+* directories:
+ This is a hash of folders. The best ones to include are "lib" and
+ "doc", but if you specify a folder full of man pages in "man", then
+ they'll get installed just like these ones.
+
+You can use `npm init` in the root of your package in order to get you
+started with a pretty basic package.json file. See `npm-init(1)` for
+more info.
+
+## Keeping files *out* of your package
+
+Use a `.npmignore` file to keep stuff out of your package. If there's
+no .npmignore file, but there *is* a .gitignore file, then npm will
+ignore the stuff matched by the .gitignore file. If you *want* to
+include something that is excluded by your .gitignore file, you can
+create an empty .npmignore file to override it.
+
+## Link Packages
+
+`npm link` is designed to install a development package and see the
+changes in real time without having to keep re-installing it. (You do
+need to either re-link or `npm rebuild -g` to update compiled packages,
+of course.)
+
+More info at `npm-link(1)`.
+
+## Before Publishing: Make Sure Your Package Installs and Works
+
+**This is important.**
+
+If you cannot install it locally, you'll have
+problems trying to publish it. Or, worse yet, you'll be able to
+publish it, but you'll be publishing a broken or pointless package.
+So don't do that.
+
+In the root of your package, do this:
+
+ npm install . -g
+
+That'll show you that it's working. If you'd rather just create a symlink
+package that points to your working directory, then do this:
+
+ npm link
+
+Use `npm ls -g` to see if it's there.
+
+To test a local install, go into some other folder, and then do:
+
+ cd ../some-other-folder
+ npm install ../my-package
+
+to install it locally into the node_modules folder in that other place.
+
+Then go into the node-repl, and try using require("my-thing") to
+bring in your module's main module.
+
+## Create a User Account
+
+Create a user with the adduser command. It works like this:
+
+ npm adduser
+
+and then follow the prompts.
+
+This is documented better in npm-adduser(1).
+
+## Publish your package
+
+This part's easy. In the root of your folder, do this:
+
+ npm publish
+
+You can give publish a url to a tarball, or a filename of a tarball,
+or a path to a folder.
+
+Note that pretty much **everything in that folder will be exposed**
+by default. So, if you have secret stuff in there, use a `.npminclude`
+or `.npmignore` file to list out the globs to include/ignore, or publish
+from a fresh checkout.
+
+## Brag about it
+
+Send emails, write blogs, blab in IRC.
+
+Tell the world how easy it is to install your program!
+
+## SEE ALSO
+
+* npm-faq(1)
+* npm(1)
+* npm-init(1)
+* npm-json(1)
+* npm-scripts(1)
+* npm-publish(1)
+* npm-adduser(1)
+* npm-registry(1)
diff --git a/deps/npm/doc/cli/docs.md b/deps/npm/doc/cli/docs.md
new file mode 100644
index 0000000000..26b2455dd8
--- /dev/null
+++ b/deps/npm/doc/cli/docs.md
@@ -0,0 +1,38 @@
+npm-docs(1) -- Docs for a package in a web browser maybe
+========================================================
+
+## SYNOPSIS
+
+ npm docs <pkgname>
+ npm home <pkgname>
+
+## DESCRIPTION
+
+This command tries to guess at the likely location of a package's
+documentation URL, and then tries to open it using the `--browser`
+config param.
+
+## CONFIGURATION
+
+### browser
+
+* Default: OS X: `"open"`, others: `"google-chrome"`
+* Type: String
+
+The browser that is called by the `npm docs` command to open websites.
+
+### registry
+
+* Default: https://registry.npmjs.org/
+* Type: url
+
+The base URL of the npm package registry.
+
+
+## SEE ALSO
+
+* npm-view(1)
+* npm-publish(1)
+* npm-registry(1)
+* npm-config(1)
+* npm-json(1)
diff --git a/deps/npm/doc/cli/edit.md b/deps/npm/doc/cli/edit.md
new file mode 100644
index 0000000000..9eaccfc540
--- /dev/null
+++ b/deps/npm/doc/cli/edit.md
@@ -0,0 +1,35 @@
+npm-edit(1) -- Edit an installed package
+========================================
+
+## SYNOPSIS
+
+ npm edit <name>[@<version>]
+
+## DESCRIPTION
+
+Opens the package folder in the default editor (or whatever you've
+configured as the npm `editor` config -- see `npm-config(1)`.)
+
+After it has been edited, the package is rebuilt so as to pick up any
+changes in compiled packages.
+
+For instance, you can do `npm install connect` to install connect
+into your package, and then `npm edit connect` to make a few
+changes to your locally installed copy.
+
+## CONFIGURATION
+
+### editor
+
+* Default: `EDITOR` environment variable if set, or `"vi"` on Posix,
+ or `"notepad"` on Windows.
+* Type: path
+
+The command to run for `npm edit` or `npm config edit`.
+
+## SEE ALSO
+
+* npm-folders(1)
+* npm-explore(1)
+* npm-install(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/explore.md b/deps/npm/doc/cli/explore.md
new file mode 100644
index 0000000000..00701b392a
--- /dev/null
+++ b/deps/npm/doc/cli/explore.md
@@ -0,0 +1,40 @@
+npm-explore(1) -- Browse an installed package
+=============================================
+
+## SYNOPSIS
+
+ npm explore <name>[@<version>] [ -- <cmd>]
+
+## DESCRIPTION
+
+Spawn a subshell in the directory of the installed package specified.
+
+If a command is specified, then it is run in the subshell, which then
+immediately terminates.
+
+This is particularly handy in the case of git submodules in the
+`node_modules` folder:
+
+ npm explore some-dependency -- git pull origin master
+
+Note that the package is *not* automatically rebuilt afterwards, so be
+sure to use `npm rebuild <pkg>` if you make any changes.
+
+## CONFIGURATION
+
+### shell
+
+* Default: SHELL environment variable, or "bash" on Posix, or "cmd" on
+ Windows
+* Type: path
+
+The shell to run for the `npm explore` command.
+
+## SEE ALSO
+
+* npm-submodule(1)
+* npm-folders(1)
+* npm-edit(1)
+* npm-rebuild(1)
+* npm-build(1)
+* npm-install(1)
diff --git a/deps/npm/doc/cli/faq.md b/deps/npm/doc/cli/faq.md
new file mode 100644
index 0000000000..15bb0c637d
--- /dev/null
+++ b/deps/npm/doc/cli/faq.md
@@ -0,0 +1,223 @@
+npm-faq(1) -- Frequently Asked Questions
+========================================
+
+## Where can I find these docs in HTML?
+
+<http://npmjs.org/doc/>, or run:
+
+ npm config set viewer browser
+
+to open these documents in your default web browser rather than `man`.
+
+## It didn't work.
+
+That's not really a question.
+
+## Why didn't it work?
+
+I don't know yet.
+
+Read the error output, and if you can't figure out what it means,
+do what it says and post a bug with all the information it asks for.
+
+## Where does npm put stuff?
+
+See `npm-folders(1)`
+
+tl;dr:
+
+* Use the `npm root` command to see where modules go, and the `npm bin`
+ command to see where executables go
+* Global installs are different from local installs. If you install
+ something with the `-g` flag, then its executables go in `npm bin -g`
+ and its modules go in `npm root -g`.
+
+## How do I install something everywhere?
+
+Install it globally by tacking `-g` or `--global` onto the command.
+
+## I installed something globally, but I can't `require()` it
+
+Install it locally.
+
+## I don't wanna.
+
+Check out `npm link`. You might like it.
+
+## No, I really want 0.x style 'everything global' style.
+
+Ok, fine. Do this:
+
+ echo 'export NODE_PATH="'$(npm root -g)'"' >> ~/.bashrc
+ . ~/.bashrc
+ npm config set global true
+
+This is not recommended.
+
+Many things **will not work** if you do this. Make sure you read and
+understand `npm-config(1)` and `npm-global(1)` before you complain
+about things being broken.
+
+When you realize what a mistake it was, do this to switch back:
+
+ npm config delete global --local
+
+## If 'npm' is an acronym, why is it never capitalized?
+
+Contrary to the belief of many, "npm" is not in fact an abbreviation for
+"Node Package Manager". It is a recursive bacronymic abbreviation for
+"npm is not an acronym". (If it was "ninaa", then it would be an
+acronym, and thus incorrectly named.)
+
+"NPM", however, *is* an acronym (more precisely, a capitonym) for the
+National Association of Pastoral Musicians. You can learn more
+about them at <http://npm.org/>.
+
+In software, "NPM" is a non-parametric mapping utility written by
+Chris Rorden. You can analyze pictures of brains with it. Learn more
+about the (capitalized) NPM program at <http://www.cabiatl.com/mricro/npm/>.
+
+The first seed that eventually grew into this flower was a bash utility
+named "pm", which was a shortened descendant of "pkgmakeinst", a
+bash function that was used to install various different things on different
+platforms, most often using Yahoo's `yinst`. If `npm` was ever an
+acronym for anything, it was `node pm` or maybe `new pm`.
+
+So, in all seriousness, the "npm" project is named after its command-line
+utility, which was organically selected to be easily typed by a right-handed
+programmer using a US QWERTY keyboard layout, ending with the
+right-ring-finger in a position to type the `-` key for flags and
+other command-line arguments. That command-line utility is always
+lower-case, even when it starts most sentences it is a part of.
+
+## How do I list installed packages?
+
+`npm ls`
+
+## How do I search for packages?
+
+`npm search`
+
+Arguments are greps. `npm search jsdom` shows jsdom packages.
+
+## How do I update npm?
+
+ npm update npm -g
+
+You can also update all outdated local packages by doing `npm update` without
+any arguments, or global packages by doing `npm update -g`.
+
+Occasionally, the version of npm will progress such that the current
+version cannot be properly installed with the version that you have
+installed already. (Consider, if there is ever a bug in the `update`
+command.)
+
+In those cases, you can do this:
+
+ curl http://npmjs.org/install.sh | sh
+
+## What is a `package`?
+
+A package is:
+
+* a) a folder containing a program described by a package.json file
+* b) a gzipped tarball containing (a)
+* c) a url that resolves to (b)
+* d) a `<name>@<version>` that is published on the registry with (c)
+* e) a `<name>@<tag>` that points to (d)
+* f) a `<name>` that has a "latest" tag satisfying (e)
+* g) a `git` url that, when cloned, results in (a).
+
+Even if you never publish your package, you can still get a lot of
+benefits of using npm if you just want to write a node program (a), and
+perhaps if you also want to be able to easily install it elsewhere
+after packing it up into a tarball (b).
+
+Git urls can be of the form:
+
+ git://github.com/user/project.git#commit-ish
+ git+ssh://user@hostname:project.git#commit-ish
+ git+http://user@hostname/project/blah.git#commit-ish
+ git+https://user@hostname/project/blah.git#commit-ish
+
+The `commit-ish` can be any tag, sha, or branch which can be supplied as
+an argument to `git checkout`. The default is `master`.
+
+## How do I install node with npm?
+
+You don't. Try one of these:
+
+* <http://github.com/isaacs/nave>
+* <http://github.com/visionmedia/n>
+* <http://github.com/creationix/nvm>
+
+## How can I use npm for development?
+
+See `npm-developers(1)` and `npm-json(1)`.
+
+You'll most likely want to `npm link` your development folder. That's
+awesomely handy.
+
+To set up your own private registry, check out `npm-registry(1)`.
+
+## Can I list a url as a dependency?
+
+Yes. It should be a url to a gzipped tarball containing a single folder
+that has a package.json in its root, or a git url.
+(See "what is a package?" above.)
+
+## How do I symlink to a dev folder so I don't have to keep re-installing?
+
+See `npm-link(1)`
+
+## The package registry website. What is that exactly?
+
+See `npm-registry(1)`.
+
+## What's up with the insecure channel warnings?
+
+Until node 0.4.10, there were problems sending big files over HTTPS. That
+means that publishes go over HTTP by default in those versions of node.
+
+## I forgot my password, and can't publish. How do I reset it?
+
+Go to <http://admin.npmjs.org/reset>.
+
+## I get ECONNREFUSED a lot. What's up?
+
+Either the registry is down, or node's DNS isn't able to reach out.
+This happens a lot if you don't follow *all* the steps in the Cygwin
+setup doc.
+
+To check if the registry is down, open up
+<http://registry.npmjs.org/-/short>
+in a web browser. This will also tell you if you are just unable to
+access the internet for some reason.
+
+If the registry IS down, let me know by emailing or posting an issue.
+We'll have someone kick it or something.
+
+## Who does npm?
+
+`npm view npm author`
+
+`npm view npm contributors`
+
+## I have a question or request not addressed here. Where should I put it?
+
+Discuss it on the mailing list, or post an issue.
+
+* <npm-@googlegroups.com>
+* <http://github.com/isaacs/npm/issues>
+
+## Why does npm hate me?
+
+npm is not capable of hatred. It loves everyone, especially you.
+
+## SEE ALSO
+
+* npm(1)
+* npm-developers(1)
+* npm-json(1)
+* npm-config(1)
+* npm-folders(1)
diff --git a/deps/npm/doc/cli/find.md b/deps/npm/doc/cli/find.md
new file mode 120000
index 0000000000..9b687d1c19
--- /dev/null
+++ b/deps/npm/doc/cli/find.md
@@ -0,0 +1 @@
+search.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/folders.md b/deps/npm/doc/cli/folders.md
new file mode 100644
index 0000000000..20358612ad
--- /dev/null
+++ b/deps/npm/doc/cli/folders.md
@@ -0,0 +1,209 @@
+npm-folders(1) -- Folder Structures Used by npm
+===============================================
+
+## DESCRIPTION
+
+npm puts various things on your computer. That's its job.
+
+This document will tell you what it puts where.
+
+### tl;dr
+
+* Local install (default): puts stuff in `./node_modules` of the current
+ package root.
+* Global install (with `-g`): puts stuff in /usr/local or wherever node
+ is installed.
+* Install it **locally** if you're going to `require()` it.
+* Install it **globally** if you're going to run it on the command line.
+* If you need both, then install it in both places, or use `npm link`.
+
+### prefix Configuration
+
+The `prefix` config defaults to the location where node is installed.
+On most systems, this is `/usr/local`, and most of the time is the same
+as node's `process.installPrefix`.
+
+On windows, this is the exact location of the node.exe binary. On Unix
+systems, it's one level up, since node is typically installed at
+`{prefix}/bin/node` rather than `{prefix}/node.exe`.
+
+When the `global` flag is set, npm installs things into this prefix.
+When it is not set, it uses the root of the current package, or the
+current working directory if not in a package already.
+
+### Node Modules
+
+Packages are dropped into the `node_modules` folder under the `prefix`.
+When installing locally, this means that you can
+`require("packagename")` to load its main module, or
+`require("packagename/lib/path/to/sub/module")` to load other modules.
+
+Global installs on Unix systems go to `{prefix}/lib/node_modules`.
+Global installs on Windows go to `{prefix}/node_modules` (that is, no
+`lib` folder.)
+
+If you wish to `require()` a package, then install it locally.
+
+### Executables
+
+When in global mode, executables are linked into `{prefix}/bin` on Unix,
+or directly into `{prefix}` on Windows.
+
+When in local mode, executables are linked into
+`./node_modules/.bin` so that they can be made available to scripts run
+through npm. (For example, so that a test runner will be in the path
+when you run `npm test`.)
+
+### Man Pages
+
+When in global mode, man pages are linked into `{prefix}/share/man`.
+
+When in local mode, man pages are not installed.
+
+Man pages are not installed on Windows systems.
+
+### Cache
+
+See `npm-cache(1)`. Cache files are stored in `~/.npm` on Posix, or
+`~/npm-cache` on Windows.
+
+This is controlled by the `cache` configuration param.
+
+### Temp Files
+
+Temporary files are stored by default in the folder specified by the
+`tmp` config, which defaults to the TMPDIR, TMP, or TEMP environment
+variables, or `/tmp` on Unix and `c:\windows\temp` on Windows.
+
+Temp files are given a unique folder under this root for each run of the
+program, and are deleted upon successful exit.
+
+## More Information
+
+When installing locally, npm first tries to find an appropriate
+`prefix` folder. This is so that `npm install foo@1.2.3` will install
+to the sensible root of your package, even if you happen to have `cd`ed
+into some other folder.
+
+Starting at the $PWD, npm will walk up the folder tree checking for a
+folder that contains either a `package.json` file, or a `node_modules`
+folder. If such a thing is found, then that is treated as the effective
+"current directory" for the purpose of running npm commands. (This
+behavior is inspired by and similar to git's .git-folder seeking
+logic when running git commands in a working dir.)
+
+If no package root is found, then the current folder is used.
+
+When you run `npm install foo@1.2.3`, then the package is loaded into
+the cache, and then unpacked into `./node_modules/foo`. Then, any of
+foo's dependencies are similarly unpacked into
+`./node_modules/foo/node_modules/...`.
+
+Any bin files are symlinked to `./node_modules/.bin/`, so that they may
+be found by npm scripts when necessary.
+
+### Global Installation
+
+If the `global` configuration is set to true, then npm will
+install packages "globally".
+
+For global installation, packages are installed roughly the same way,
+but using the folders described above.
+
+### Cycles, Conflicts, and Folder Parsimony
+
+Cycles are handled using the property of node's module system that it
+walks up the directories looking for `node_modules` folders. So, at every
+stage, if a package is already installed in an ancestor `node_modules`
+folder, then it is not installed at the current location.
+
+Consider the case above, where `foo -> bar -> baz`. Imagine if, in
+addition to that, baz depended on bar, so you'd have:
+`foo -> bar -> baz -> bar -> baz ...`. However, since the folder
+structure is: `foo/node_modules/bar/node_modules/baz`, there's no need to
+put another copy of bar into `.../baz/node_modules`, since when it calls
+require("bar"), it will get the copy that is installed in
+`foo/node_modules/bar`.
+
+This shortcut is only used if the exact same
+version would be installed in multiple nested `node_modules` folders. It
+is still possible to have `a/node_modules/b/node_modules/a` if the two
+"a" packages are different versions. However, without repeating the
+exact same package multiple times, an infinite regress will always be
+prevented.
+
+Another optimization can be made by installing dependencies at the
+highest level possible, below the localized "target" folder.
+
+#### Example
+
+Consider this dependency graph:
+
+ foo
+ +-- blerg@1.2.5
+ +-- bar@1.2.3
+ | +-- blerg@1.x (latest=1.3.7)
+ | +-- baz@2.x
+ | | `-- quux@3.x
+ | | `-- bar@1.2.3 (cycle)
+ | `-- asdf@*
+ `-- baz@1.2.3
+ `-- quux@3.x
+ `-- bar
+
+In this case, we might expect a folder structure like this:
+
+ foo
+ +-- node_modules
+ +-- blerg (1.2.5) <---[A]
+ +-- bar (1.2.3) <---[B]
+ | +-- node_modules
+ | | `-- baz (2.0.2) <---[C]
+ | | `-- node_modules
+ | | `-- quux (3.2.0)
+ | `-- asdf (2.3.4)
+ `-- baz (1.2.3) <---[D]
+ `-- node_modules
+ `-- quux (3.2.0) <---[E]
+
+Since foo depends directly on bar@1.2.3 and baz@1.2.3, those are
+installed in foo's `node_modules` folder.
+
+Even though the latest copy of blerg is 1.3.7, foo has a specific
+dependency on version 1.2.5. So, that gets installed at [A]. Since the
+parent installation of blerg satisfies bar's dependency on blerg@1.x,
+it does not install another copy under [B].
+
+Bar [B] also has dependencies on baz and asdf, so those are installed in
+bar's `node_modules` folder. Because it depends on `baz@2.x`, it cannot
+re-use the `baz@1.2.3` installed in the parent `node_modules` folder [D],
+and must install its own copy [C].
+
+Underneath bar, the `baz->quux->bar` dependency creates a cycle.
+However, because `bar` is already in `quux`'s ancestry [B], it does not
+unpack another copy of bar into that folder.
+
+Underneath `foo->baz` [D], quux's [E] folder tree is empty, because its
+dependency on bar is satisfied by the parent folder copy installed at [B].
+
+For a graphical breakdown of what is installed where, use `npm ls`.
+
+### Publishing
+
+Upon publishing, npm will look in the `node_modules` folder. If any of
+the items there are not in the `bundledDependencies` array, then they will
+not be included in the package tarball.
+
+This allows a package maintainer to install all of their dependencies
+(and dev dependencies) locally, but only re-publish those items that
+cannot be found elsewhere. See `npm-json(1)` for more information.
+
+## SEE ALSO
+
+* npm-faq(1)
+* npm-json(1)
+* npm-install(1)
+* npm-pack(1)
+* npm-cache(1)
+* npm-config(1)
+* npm-publish(1)
diff --git a/deps/npm/doc/cli/get.md b/deps/npm/doc/cli/get.md
new file mode 120000
index 0000000000..3dc8737366
--- /dev/null
+++ b/deps/npm/doc/cli/get.md
@@ -0,0 +1 @@
+config.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/global.md b/deps/npm/doc/cli/global.md
new file mode 120000
index 0000000000..c3598dd7df
--- /dev/null
+++ b/deps/npm/doc/cli/global.md
@@ -0,0 +1 @@
+folders.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/help-search.md b/deps/npm/doc/cli/help-search.md
new file mode 100644
index 0000000000..9c16901ebb
--- /dev/null
+++ b/deps/npm/doc/cli/help-search.md
@@ -0,0 +1,35 @@
+npm-help-search(1) -- Search npm help documentation
+===================================================
+
+## SYNOPSIS
+
+ npm help-search some search terms
+
+## DESCRIPTION
+
+This command will search the npm markdown documentation files for the
+terms provided, and then list the results, sorted by relevance.
+
+If only one result is found, then it will show that help topic.
+
+If the argument to `npm help` is not a known help topic, then it will
+call `help-search`. It is rarely if ever necessary to call this
+command directly.
+
+## CONFIGURATION
+
+### long
+
+* Type: Boolean
+* Default: false
+
+If true, the "long" flag will cause help-search to output context around
+where the terms were found in the documentation.
+
+If false, then help-search will just list out the help topics found.
+
+## SEE ALSO
+
+* npm(1)
+* npm-faq(1)
+* npm-help(1)
diff --git a/deps/npm/doc/cli/help.md b/deps/npm/doc/cli/help.md
new file mode 100644
index 0000000000..b51b0f1649
--- /dev/null
+++ b/deps/npm/doc/cli/help.md
@@ -0,0 +1,38 @@
+npm-help(1) -- Get help on npm
+==============================
+
+## SYNOPSIS
+
+ npm help <topic>
+ npm help some search terms
+
+## DESCRIPTION
+
+If supplied a topic, then show the appropriate documentation page.
+
+If the topic does not exist, or if multiple terms are provided, then run
+the `help-search` command to find a match. Note that, if `help-search`
+finds a single subject, then it will run `help` on that topic, so unique
+matches are equivalent to specifying a topic name.
+
+## CONFIGURATION
+
+### viewer
+
+* Default: "man" on Posix, "browser" on Windows
+* Type: path
+
+The program to use to view help content.
+
+Set to `"browser"` to view html help content in the default web browser.
+
+## SEE ALSO
+
+* npm(1)
+* README
+* npm-faq(1)
+* npm-folders(1)
+* npm-config(1)
+* npm-json(1)
+* npm-help-search(1)
+* npm-index(1)
diff --git a/deps/npm/doc/cli/home.md b/deps/npm/doc/cli/home.md
new file mode 120000
index 0000000000..8828313f5b
--- /dev/null
+++ b/deps/npm/doc/cli/home.md
@@ -0,0 +1 @@
+docs.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/init.md b/deps/npm/doc/cli/init.md
new file mode 100644
index 0000000000..39297b4c4d
--- /dev/null
+++ b/deps/npm/doc/cli/init.md
@@ -0,0 +1,24 @@
+npm-init(1) -- Interactively create a package.json file
+=======================================================
+
+## SYNOPSIS
+
+ npm init
+
+## DESCRIPTION
+
+This will ask you a bunch of questions, and then write a package.json for you.
+
+It attempts to make reasonable guesses about what you want things to be set to,
+and then writes a package.json file with the options you've selected.
+
+If you already have a package.json file, it'll read that first, and default to
+the options in there.
+
+It is strictly additive, so it does not delete options from your package.json
+without a really good reason to do so.
+
+## SEE ALSO
+
+* npm-json(1)
+* npm-version(1)
diff --git a/deps/npm/doc/cli/install.md b/deps/npm/doc/cli/install.md
new file mode 100644
index 0000000000..22eb8234e7
--- /dev/null
+++ b/deps/npm/doc/cli/install.md
@@ -0,0 +1,201 @@
+npm-install(1) -- Install a package
+===================================
+
+## SYNOPSIS
+
+ npm install (with no args in a package dir)
+ npm install <tarball file>
+ npm install <tarball url>
+ npm install <folder>
+ npm install <name>
+ npm install <name>@<tag>
+ npm install <name>@<version>
+ npm install <name>@<version range>
+
+## DESCRIPTION
+
+This command installs a package, and any packages that it depends on.
+
+A `package` is:
+
+* a) a folder containing a program described by a package.json file
+* b) a gzipped tarball containing (a)
+* c) a url that resolves to (b)
+* d) a `<name>@<version>` that is published on the registry with (c)
+* e) a `<name>@<tag>` that points to (d)
+* f) a `<name>` that has a "latest" tag satisfying (e)
+* g) a `<git remote url>` that resolves to (b)
+
+Even if you never publish your package, you can still get a lot of
+benefits of using npm if you just want to write a node program (a), and
+perhaps if you also want to be able to easily install it elsewhere
+after packing it up into a tarball (b).
+
+
+* `npm install` (in package directory, no arguments):
+ Install the dependencies in the local node_modules folder.
+
+ In global mode (ie, with `-g` or `--global` appended to the command),
+ it installs the current package context (ie, the current working
+ directory) as a global package.
+
+* `npm install <folder>`:
+ Install a package that is sitting in a folder on the filesystem.
+
+* `npm install <tarball file>`:
+ Install a package that is sitting on the filesystem. Note: if you just want
+ to link a dev directory into your npm root, you can do this more easily by
+ using `npm link`.
+
+ Example:
+
+ npm install ./package.tgz
+
+* `npm install <tarball url>`:
+ Fetch the tarball url, and then install it. In order to distinguish between
+ this and other options, the argument must start with "http://" or "https://"
+
+ Example:
+
+ npm install https://github.com/indexzero/forever/tarball/v0.5.6
+
+* `npm install <name>`:
+ Do a `<name>@<tag>` install, where `<tag>` is the "tag" config. (See
+ `npm-config(1)`)
+
+ Example:
+
+ npm install sax
+
+ **Note**: If there is a file or folder named `<name>` in the current
+ working directory, then it will try to install that, and only try to
+ fetch the package by name if it is not valid.
+
+* `npm install <name>@<tag>`:
+ Install the version of the package that is referenced by the specified tag.
+ If the tag does not exist in the registry data for that package, then this
+ will fail.
+
+ Example:
+
+ npm install sax@latest
+
+* `npm install <name>@<version>`:
+ Install the specified version of the package. This will fail if the version
+ has not been published to the registry.
+
+ Example:
+
+ npm install sax@0.1.1
+
+* `npm install <name>@<version range>`:
+ Install a version of the package matching the specified version range. This
+ will follow the same rules for resolving dependencies described in `npm-json(1)`.
+
+ Note that most version ranges must be put in quotes so that your shell will
+ treat it as a single argument.
+
+ Example:
+
+ npm install sax@">=0.1.0 <0.2.0"
+
+* `npm install <git remote url>`:
+
+ Install a package by cloning a git remote url. The format of the git
+ url is:
+
+ <protocol>://[<user>@]<hostname><separator><path>[#<commit-ish>]
+
+ `<protocol>` is one of `git`, `git+ssh`, `git+http`, or
+ `git+https`. If no `<commit-ish>` is specified, then `master` is
+ used.
+
+ Examples:
+
+ git+ssh://git@github.com:isaacs/npm.git#v1.0.27
+ git+https://isaacs@github.com/isaacs/npm.git
+ git://github.com/isaacs/npm.git#v1.0.27
+
+You may combine multiple arguments, and even multiple types of arguments.
+For example:
+
+ npm install sax@">=0.1.0 <0.2.0" bench supervisor
+
+The `--tag` argument will apply to all of the specified install targets.
+
+The `--force` argument will force npm to fetch remote resources even if a
+local copy exists on disk.
+
+ npm install sax --force
+
+The `--global` argument will cause npm to install the package globally
+rather than locally. See `npm-global(1)`.
+
+The `--link` argument will cause npm to link global installs into the
+local space in some cases.
+
+See `npm-config(1)`. Many of the configuration params have some
+effect on installation, since that's most of what npm does.
+
+## ALGORITHM
+
+To install a package, npm uses the following algorithm:
+
+ install(where, what, family, ancestors)
+ fetch what, unpack to <where>/node_modules/<what>
+ for each dep in what.dependencies
+ resolve dep to precise version
+ for each dep@version in what.dependencies
+ not in <where>/node_modules/<what>/node_modules/*
+ and not in <family>
+ add precise version deps to <family>
+ install(<where>/node_modules/<what>, dep, family)
+
+For this `package{dep}` structure: `A{B,C}, B{C}, C{D}`,
+this algorithm produces:
+
+ A
+ +-- B
+ `-- C
+ `-- D
+
+That is, the dependency from B to C is satisfied by the fact that A
+already caused C to be installed at a higher level.
+
+See npm-folders(1) for a more detailed description of the specific
+folder structures that npm creates.
+
+### Limitations of npm's Install Algorithm
+
+There are some very rare and pathological edge-cases where a cycle can
+cause npm to try to install a never-ending tree of packages. Here is
+the simplest case:
+
+ A -> B -> A' -> B' -> A -> B -> A' -> B' -> A -> ...
+
+where `A` is some version of a package, and `A'` is a different version
+of the same package. Because `B` depends on a different version of `A`
+than the one that is already in the tree, it must install a separate
+copy. The same is true of `A'`, which must install `B'`. Because `B'`
+depends on the original version of `A`, which has been overridden, the
+cycle falls into infinite regress.
+
+To avoid this situation, npm flat-out refuses to install any
+`name@version` that is already present anywhere in the tree of package
+folder ancestors. A more correct, but more complex, solution would be
+to symlink the existing version into the new location. If this ever
+affects a real use-case, it will be investigated.
+
+## SEE ALSO
+
+* npm-folders(1)
+* npm-update(1)
+* npm-link(1)
+* npm-rebuild(1)
+* npm-scripts(1)
+* npm-build(1)
+* npm-config(1)
+* npm-registry(1)
+* npm-folders(1)
+* npm-tag(1)
+* npm-rm(1)
diff --git a/deps/npm/doc/cli/json.md b/deps/npm/doc/cli/json.md
new file mode 100644
index 0000000000..5f6e7ef621
--- /dev/null
+++ b/deps/npm/doc/cli/json.md
@@ -0,0 +1,472 @@
+npm-json(1) -- Specifics of npm's package.json handling
+=======================================================
+
+## DESCRIPTION
+
+This document is all you need to know about what's required in your package.json
+file. It must be actual JSON, not just a JavaScript object literal.
+
+A lot of the behavior described in this document is affected by the config
+settings described in `npm-config(1)`.
+
+## DEFAULT VALUES
+
+npm will default some values based on package contents.
+
+* `"scripts": {"start": "node server.js"}`
+
+ If there is a `server.js` file in the root of your package, then npm
+ will default the `start` command to `node server.js`.
+
+* `"scripts":{"preinstall": "node-waf clean || true; node-waf configure build"}`
+
+ If there is a `wscript` file in the root of your package, npm will
+ default the `preinstall` command to compile using node-waf.
+
+* `"contributors": [...]`
+
+ If there is an `AUTHORS` file in the root of your package, npm will
+ treat each line as a `Name <email> (url)` format, where email and url
+ are optional. Lines which start with a `#` or are blank, will be
+ ignored.
+
+## name
+
+The *most* important things in your package.json are the name and version fields.
+Those are actually required, and your package won't install without
+them. The name and version together form an identifier that is assumed
+to be completely unique. Changes to the package should come along with
+changes to the version.
+
+The name is what your thing is called. Some tips:
+
+* Don't put "js" or "node" in the name. It's assumed that it's js, since you're
+ writing a package.json file, and you can specify the engine using the "engines"
+ field. (See below.)
+* The name ends up being part of a URL, an argument on the command line, and a
+ folder name. Any name with non-url-safe characters will be rejected.
+ Also, it can't start with a dot or an underscore.
+* The name will probably be passed as an argument to require(), so it should
+ be something short, but also reasonably descriptive.
+* You may want to check the npm registry to see if there's something by that name
+ already, before you get too attached to it. http://registry.npmjs.org/
+
+## version
+
+The *most* important things in your package.json are the name and version fields.
+Those are actually required, and your package won't install without
+them. The name and version together form an identifier that is assumed
+to be completely unique. Changes to the package should come along with
+changes to the version.
+
+Version must be parseable by
+[node-semver](https://github.com/isaacs/node-semver), which is bundled
+with npm as a dependency. (`npm install semver` to use it yourself.)
+
+Here's how npm's semver implementation deviates from what's on semver.org:
+
+* Versions can start with "v"
+* A numeric item separated from the main three-number version by a hyphen
+ will be interpreted as a "build" number, and will *increase* the version.
+ But, if the tag is not a number separated by a hyphen, then it's treated
+ as a pre-release tag, and is *less than* the version without a tag.
+ So, `0.1.2-7 > 0.1.2-7-beta > 0.1.2-6 > 0.1.2 > 0.1.2beta`
+
+This is a little bit confusing to explain, but matches what you see in practice
+when people create tags in git like "v1.2.3" and then do "git describe" to generate
+a patch version.
+
+## description
+
+Put a description in it. It's a string. This helps people discover your
+package, as it's listed in `npm search`.
+
+## keywords
+
+Put keywords in it. It's an array of strings. This helps people
+discover your package as it's listed in `npm search`.
+
+## homepage
+
+The url to the project homepage.
+
+**NOTE**: This is *not* the same as "url". If you put a "url" field,
+then the registry will think it's a redirection to your package that has
+been published somewhere else, and spit at you.
+
+Literally. Spit. I'm so not kidding.
+
+## bugs
+
+The url to your project's issue tracker and / or the email address to which
+issues should be reported. These are helpful for people who encounter issues
+with your package.
+
+It should look like this:
+
+ { "url" : "http://github.com/owner/project/issues"
+ , "email" : "project@hostname.com"
+ }
+
+You can specify either one or both values. If you want to provide only a url,
+you can specify the value for "bugs" as a simple string instead of an object.
+
+If a url is provided, it will be used by the `npm bugs` command.
+
+## people fields: author, contributors
+
+The "author" is one person. "contributors" is an array of people. A "person"
+is an object with a "name" field and optionally "url" and "email", like this:
+
+ { "name" : "Barney Rubble"
+ , "email" : "b@rubble.com"
+ , "url" : "http://barnyrubble.tumblr.com/"
+ }
+
+Or you can shorten that all into a single string, and npm will parse it for you:
+
+    "Barney Rubble <b@rubble.com> (http://barnyrubble.tumblr.com/)"
+
+Both email and url are optional either way.
+
+npm also sets a top-level "maintainers" field with your npm user info.
+
+## files
+
+The "files" field is an array of files to include in your project. If
+you name a folder in the array, then it will also include the files
+inside that folder. (Unless they would be ignored by another rule.)
+
+You can also provide a ".npmignore" file in the root of your package,
+which will keep files from being included, even if they would be picked
+up by the files array. The ".npmignore" file works just like a
+".gitignore".
+
+## main
+
+The main field is a module ID that is the primary entry point to your program.
+That is, if your package is named `foo`, and a user installs it, and then does
+`require("foo")`, then your main module's exports object will be returned.
+
+This should be a module ID relative to the root of your package folder.
+
+For most modules, it makes the most sense to have a main script and often not
+much else.
+
+## bin
+
+A lot of packages have one or more executable files that they'd like to
+install into the PATH. npm makes this pretty easy (in fact, it uses this
+feature to install the "npm" executable.)
+
+To use this, supply a `bin` field in your package.json which is a map of
+command name to local file name. On install, npm will symlink that file into
+`prefix/bin` for global installs, or `./node_modules/.bin/` for local
+installs.
+
+
+For example, npm has this:
+
+ { "bin" : { "npm" : "./cli.js" } }
+
+So, when you install npm, it'll create a symlink from the `cli.js` script to
+`/usr/local/bin/npm`.
+
+If you have a single executable, and its name should be the name
+of the package, then you can just supply it as a string. For example:
+
+ { "name": "my-program"
+ , "version": "1.2.5"
+ , "bin": "./path/to/program" }
+
+would be the same as this:
+
+ { "name": "my-program"
+ , "version": "1.2.5"
+ , "bin" : { "my-program" : "./path/to/program" } }
+
+## man
+
+Specify either a single file or an array of filenames to put in place for the
+`man` program to find.
+
+If only a single file is provided, then it's installed such that it is the
+result from `man <pkgname>`, regardless of its actual filename. For example:
+
+ { "name" : "foo"
+ , "version" : "1.2.3"
+ , "description" : "A packaged foo fooer for fooing foos"
+ , "main" : "foo.js"
+ , "man" : "./man/doc.1"
+ }
+
+would link the `./man/doc.1` file in such that it is the target for `man foo`
+
+If the filename doesn't start with the package name, then it's prefixed.
+So, this:
+
+ { "name" : "foo"
+ , "version" : "1.2.3"
+ , "description" : "A packaged foo fooer for fooing foos"
+ , "main" : "foo.js"
+ , "man" : [ "./man/foo.1", "./man/bar.1" ]
+ }
+
+will create files to do `man foo` and `man foo-bar`.
+
+Man files must end with a number, and optionally a `.gz` suffix if they are
+compressed. The number dictates which man section the file is installed into.
+
+ { "name" : "foo"
+ , "version" : "1.2.3"
+ , "description" : "A packaged foo fooer for fooing foos"
+ , "main" : "foo.js"
+ , "man" : [ "./man/foo.1", "./man/foo.2" ]
+ }
+
+will create entries for `man foo` and `man 2 foo`
+
+## directories
+
+The CommonJS [Packages](http://wiki.commonjs.org/wiki/Packages/1.0) spec details a
+few ways that you can indicate the structure of your package using a `directories`
+hash. If you look at [npm's package.json](http://registry.npmjs.org/npm/latest),
+you'll see that it has directories for doc, lib, and man.
+
+In the future, this information may be used in other creative ways.
+
+### directories.lib
+
+Tell people where the bulk of your library is. Nothing special is done
+with the lib folder in any way, but it's useful meta info.
+
+### directories.bin
+
+If you specify a "bin" directory, then all the files in that folder will
+be used as the "bin" hash.
+
+If you have a "bin" hash already, then this has no effect.
+
+### directories.man
+
+A folder that is full of man pages. Sugar to generate a "man" array by
+walking the folder.
+
+### directories.doc
+
+Put markdown files in here. Eventually, these will be displayed nicely,
+maybe, someday.
+
+### directories.example
+
+Put example scripts in here. Someday, it might be exposed in some clever way.
+
+## repository
+
+Specify the place where your code lives. This is helpful for people who
+want to contribute. If the git repo is on github, then the `npm docs`
+command will be able to find you.
+
+Do it like this:
+
+ "repository" :
+ { "type" : "git"
+ , "url" : "http://github.com/isaacs/npm.git"
+ }
+
+ "repository" :
+ { "type" : "svn"
+ , "url" : "http://v8.googlecode.com/svn/trunk/"
+ }
+
+The URL should be a publicly available (perhaps read-only) url that can be handed
+directly to a VCS program without any modification. It should not be a url to an
+html project page that you put in your browser. It's for computers.
+
+## scripts
+
+The "scripts" member is an object hash of script commands that are run
+at various times in the lifecycle of your package. The key is the lifecycle
+event, and the value is the command to run at that point.
+
+See `npm-scripts(1)` to find out more about writing package scripts.
+
+## config
+
+A "config" hash can be used to set configuration
+parameters used in package scripts that persist across upgrades. For
+instance, if a package had the following:
+
+ { "name" : "foo"
+ , "config" : { "port" : "8080" } }
+
+and then had a "start" command that then referenced the
+`npm_package_config_port` environment variable, then the user could
+override that by doing `npm config set foo:port 8001`.
+
+See `npm-config(1)` and `npm-scripts(1)` for more on package
+configs.
+
+## dependencies
+
+Dependencies are specified with a simple hash of package name to version
+range. The version range is EITHER a string which has one or more
+space-separated descriptors, OR a range like "fromVersion - toVersion"
+
+**Please do not put test harnesses in your `dependencies` hash.** See
+`devDependencies`, below.
+
+Version range descriptors may be any of the following styles, where "version"
+is a semver compatible version identifier.
+
+* `version` Must match `version` exactly
+* `=version` Same as just `version`
+* `>version` Must be greater than `version`
+* `>=version` etc
+* `<version`
+* `<=version`
+* `~version` See 'Tilde Version Ranges' below
+* `1.2.x` See 'X Version Ranges' below
+* `http://...` See 'URLs as Dependencies' below
+* `*` Matches any version
+* `""` (just an empty string) Same as `*`
+* `version1 - version2` Same as `>=version1 <=version2`.
+* `range1 || range2` Passes if either range1 or range2 are satisfied.
+
+For example, these are all valid:
+
+ { "dependencies" :
+ { "foo" : "1.0.0 - 2.9999.9999"
+ , "bar" : ">=1.0.2 <2.1.2"
+ , "baz" : ">1.0.2 <=2.3.4"
+ , "boo" : "2.0.1"
+ , "qux" : "<1.0.0 || >=2.3.1 <2.4.5 || >=2.5.2 <3.0.0"
+ , "asd" : "http://asdf.com/asdf.tar.gz"
+ , "til" : "~1.2"
+ , "elf" : "~1.2.3"
+ , "two" : "2.x"
+ , "thr" : "3.3.x"
+ }
+ }
+
+### Tilde Version Ranges
+
+A range specifier starting with a tilde `~` character is matched against
+a version in the following fashion.
+
+* The version must be at least as high as the range.
+* The version must be less than the next major revision above the range.
+
+For example, the following are equivalent:
+
+* `"~1.2.3" = ">=1.2.3 <1.3.0"`
+* `"~1.2" = ">=1.2.0 <2.0.0"`
+* `"~1" = ">=1.0.0 <2.0.0"`
+
+### X Version Ranges
+
+An "x" in a version range specifies that the version number must start
+with the supplied digits, but any digit may be used in place of the x.
+
+The following are equivalent:
+
+* `"1.2.x" = ">=1.2.0 <1.3.0"`
+* `"1.x.x" = ">=1.0.0 <2.0.0"`
+* `"1.2" = "1.2.x"`
+* `"1.x" = "1.x.x"`
+* `"1" = "1.x.x"`
+
+You may not supply a comparator with a version containing an x. Any
+digits after the first "x" are ignored.
+
+### URLs as Dependencies
+
+Starting with npm version 0.2.14, you may specify a tarball URL in place
+of a version range.
+
+This tarball will be downloaded and installed locally to your package at
+install time.
+
+## devDependencies
+
+If someone is planning on downloading and using your module in their
+program, then they probably don't want or need to download and build
+the external test or documentation framework that you use.
+
+In this case, it's best to list these additional items in a
+`devDependencies` hash.
+
+These things will be installed whenever the `--dev` configuration flag
+is set. This flag is set automatically when doing `npm link`, and can
+be managed like any other npm configuration param. See `npm-config(1)`
+for more on the topic.
+
+## bundledDependencies
+
+Array of package names that will be bundled when publishing the package.
+
+If this is spelled `"bundleDependencies"`, then that is also honorable.
+
+## engines
+
+You can specify the version of
+node that your stuff works on:
+
+ { "engines" : { "node" : ">=0.1.27 <0.1.30" } }
+
+And, like with dependencies, if you don't specify the version (or if you
+specify "*" as the version), then any version of node will do.
+
+If you specify an "engines" field, then npm will require that "node" be
+somewhere on that list. If "engines" is omitted, then npm will just assume
+that it works on node.
+
+You can also use the "engines" field to specify which versions of npm
+are capable of properly installing your program. For example:
+
+ { "engines" : { "npm" : "~1.0.20" } }
+
+## preferGlobal
+
+If your package is primarily a command-line application that should be
+installed globally, then set this value to `true` to provide a warning
+if it is installed locally.
+
+It doesn't actually prevent users from installing it locally, but it
+does help prevent some confusion if it doesn't work as expected.
+
+## private
+
+If you set `"private": true` in your package.json, then npm will refuse
+to publish it.
+
+This is a way to prevent accidental publication of private repositories.
+If you would like to ensure that a given package is only ever published
+to a specific registry (for example, an internal registry),
+then use the `publishConfig` hash described below
+to override the `registry` config param at publish-time.
+
+## publishConfig
+
+This is a set of config values that will be used at publish-time. It's
+especially handy if you want to set the tag or registry, so that you can
+ensure that a given package is not tagged with "latest" or published to
+the global public registry by default.
+
+Any config values can be overridden, but of course only "tag" and
+"registry" probably matter for the purposes of publishing.
+
+See `npm-config(1)` to see the list of config options that can be
+overridden.
+
+## SEE ALSO
+
+* npm-semver(1)
+* npm-init(1)
+* npm-version(1)
+* npm-config(1)
+* npm-help(1)
+* npm-faq(1)
+* npm-install(1)
+* npm-publish(1)
+* npm-rm(1)
diff --git a/deps/npm/doc/cli/link.md b/deps/npm/doc/cli/link.md
new file mode 100644
index 0000000000..dd54792e29
--- /dev/null
+++ b/deps/npm/doc/cli/link.md
@@ -0,0 +1,57 @@
+npm-link(1) -- Symlink a package folder
+=======================================
+
+## SYNOPSIS
+
+ npm link (in package folder)
+ npm link <pkgname>
+
+## DESCRIPTION
+
+Package linking is a two-step process.
+
+First, `npm link` in a package folder will create a globally-installed
+symbolic link from `prefix/package-name` to the current folder.
+
+Next, in some other location, `npm link package-name` will create a
+symlink from the local `node_modules` folder to the global symlink.
+
+When creating tarballs for `npm publish`, the linked packages are
+"snapshotted" to their current state by resolving the symbolic links.
+
+This is
+handy for installing your own stuff, so that you can work on it and test it
+iteratively without having to continually rebuild.
+
+For example:
+
+ cd ~/projects/node-redis # go into the package directory
+ npm link # creates global link
+ cd ~/projects/node-bloggy # go into some other package directory.
+ npm link redis # link-install the package
+
+Now, any changes to ~/projects/node-redis will be reflected in
+~/projects/node-bloggy/node_modules/redis/
+
+You may also shortcut the two steps in one. For example, to do the
+above use-case in a shorter way:
+
+ cd ~/projects/node-bloggy # go into the dir of your main project
+ npm link ../node-redis # link the dir of your dependency
+
+The second line is the equivalent of doing:
+
+ (cd ../node-redis; npm link)
+ npm link redis
+
+That is, it first creates a global link, and then links the global
+installation target into your project's `node_modules` folder.
+
+## SEE ALSO
+
+* npm-developers(1)
+* npm-faq(1)
+* npm-json(1)
+* npm-install(1)
+* npm-folders(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/list.md b/deps/npm/doc/cli/list.md
new file mode 100644
index 0000000000..596349a815
--- /dev/null
+++ b/deps/npm/doc/cli/list.md
@@ -0,0 +1,55 @@
+npm-ls(1) -- List installed packages
+======================================
+
+## SYNOPSIS
+
+ npm list
+ npm ls
+ npm la
+ npm ll
+
+## DESCRIPTION
+
+This command will print to stdout all the versions of packages that are
+installed, as well as their dependencies, in a tree-structure.
+
+It does not take positional arguments, though you may set config flags
+like with any other command, such as `-g` to list global packages.
+
+It will print out extraneous, missing, and invalid packages.
+
+When run as `ll` or `la`, it shows extended information by default.
+
+## CONFIGURATION
+
+### long
+
+* Default: false
+* Type: Boolean
+
+Show extended information.
+
+### parseable
+
+* Default: false
+* Type: Boolean
+
+Show parseable output instead of tree view.
+
+### global
+
+* Default: false
+* Type: Boolean
+
+List packages in the global install prefix instead of in the current
+project.
+
+## SEE ALSO
+
+* npm-config(1)
+* npm-folders(1)
+* npm-install(1)
+* npm-link(1)
+* npm-prune(1)
+* npm-outdated(1)
+* npm-update(1)
diff --git a/deps/npm/doc/cli/ln.md b/deps/npm/doc/cli/ln.md
new file mode 120000
index 0000000000..243f994145
--- /dev/null
+++ b/deps/npm/doc/cli/ln.md
@@ -0,0 +1 @@
+link.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/ls.md b/deps/npm/doc/cli/ls.md
new file mode 120000
index 0000000000..eaad7acae2
--- /dev/null
+++ b/deps/npm/doc/cli/ls.md
@@ -0,0 +1 @@
+list.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/npm.md b/deps/npm/doc/cli/npm.md
new file mode 100644
index 0000000000..cd3360d7c5
--- /dev/null
+++ b/deps/npm/doc/cli/npm.md
@@ -0,0 +1,155 @@
+npm(1) -- node package manager
+==============================
+
+## SYNOPSIS
+
+ npm <command> [args]
+
+## VERSION
+
+@VERSION@
+
+## DESCRIPTION
+
+npm is the package manager for the Node JavaScript platform. It puts
+modules in place so that node can find them, and manages dependency
+conflicts intelligently.
+
+It is extremely configurable to support a wide variety of use cases.
+Most commonly, it is used to publish, discover, install, and develop node
+programs.
+
+Run `npm help` to get a list of available commands.
+
+## INTRODUCTION
+
+You probably got npm because you want to install stuff.
+
+Use `npm install blerg` to install the latest version of "blerg". Check out
+`npm-install(1)` for more info. It can do a lot of stuff.
+
+Use the `npm search` command to show everything that's available.
+Use `npm ls` to show everything you've installed.
+
+## DIRECTORIES
+
+See `npm-folders(1)` to learn about where npm puts stuff.
+
+In particular, npm has two modes of operation:
+
+* global mode:
+ npm installs packages into the install prefix at
+ `prefix/lib/node_modules` and bins are installed in `prefix/bin`.
+* local mode:
+ npm installs packages into the current project directory, which
+ defaults to the current working directory. Packages are installed to
+ `./node_modules`, and bins are installed to `./node_modules/.bin`.
+
+Local mode is the default. Use `--global` or `-g` on any command to
+operate in global mode instead.
+
+## DEVELOPER USAGE
+
+If you're using npm to develop and publish your code, check out the
+following help topics:
+
+* json:
+ Make a package.json file. See `npm-json(1)`.
+* link:
+ For linking your current working code into Node's path, so that you
+ don't have to reinstall every time you make a change. Use
+ `npm link` to do this.
+* install:
+ It's a good idea to install things if you don't need the symbolic link.
+  Especially, installing other people's code from the registry is done via
+ `npm install`
+* adduser:
+  Create an account or log in. Credentials are stored in the
+ user config file.
+* publish:
+ Use the `npm publish` command to upload your code to the registry.
+
+## CONFIGURATION
+
+npm is extremely configurable. It reads its configuration options from
+5 places.
+
+* Command line switches:
+ Set a config with `--key val`. All keys take a value, even if they
+ are booleans (the config parser doesn't know what the options are at
+ the time of parsing.) If no value is provided, then the option is set
+ to boolean `true`.
+* Environment Variables:
+ Set any config by prefixing the name in an environment variable with
+ `npm_config_`. For example, `export npm_config_key=val`.
+* User Configs:
+ The file at $HOME/.npmrc is an ini-formatted list of configs. If
+ present, it is parsed. If the `userconfig` option is set in the cli
+ or env, then that will be used instead.
+* Global Configs:
+ The file found at ../etc/npmrc (from the node executable, by default
+ this resolves to /usr/local/etc/npmrc) will be parsed if it is found.
+ If the `globalconfig` option is set in the cli, env, or user config,
+ then that file is parsed instead.
+* Defaults:
+ npm's default configuration options are defined in
+ lib/utils/config-defs.js. These must not be changed.
+
+See `npm-config(1)` for much much more information.
+
+## CONTRIBUTIONS
+
+Patches welcome!
+
+* code:
+ Read through `npm-coding-style(1)` if you plan to submit code.
+ You don't have to agree with it, but you do have to follow it.
+* docs:
+ If you find an error in the documentation, edit the appropriate markdown
+ file in the "doc" folder. (Don't worry about generating the man page.)
+
+Contributors are listed in npm's `package.json` file. You can view them
+easily by doing `npm view npm contributors`.
+
+If you would like to contribute, but don't know what to work on, check
+the issues list or ask on the mailing list.
+
+* <http://github.com/isaacs/npm/issues>
+* <npm-@googlegroups.com>
+
+## BUGS
+
+When you find issues, please report them:
+
+* web:
+ <http://github.com/isaacs/npm/issues>
+* email:
+ <npm-@googlegroups.com>
+
+Be sure to include *all* of the output from the npm command that didn't work
+as expected. The `npm-debug.log` file is also helpful to provide.
+
+You can also look for isaacs in #node.js on irc://irc.freenode.net. He
+will no doubt tell you to put the output in a gist or email.
+
+## HISTORY
+
+See npm-changelog(1)
+
+## AUTHOR
+
+[Isaac Z. Schlueter](http://blog.izs.me/) ::
+[isaacs](https://github.com/isaacs/) ::
+[@izs](http://twitter.com/izs) ::
+<i@izs.me>
+
+## SEE ALSO
+
+* npm-help(1)
+* npm-faq(1)
+* README
+* npm-json(1)
+* npm-install(1)
+* npm-config(1)
+* npm-index(1)
+* npm(3)
diff --git a/deps/npm/doc/cli/outdated.md b/deps/npm/doc/cli/outdated.md
new file mode 100644
index 0000000000..78df4a8b30
--- /dev/null
+++ b/deps/npm/doc/cli/outdated.md
@@ -0,0 +1,17 @@
+npm-outdated(1) -- Check for outdated packages
+==============================================
+
+## SYNOPSIS
+
+ npm outdated [<name> [<name> ...]]
+
+## DESCRIPTION
+
+This command will check the registry to see if any (or, specific) installed
+packages are currently outdated.
+
+## SEE ALSO
+
+* npm-update(1)
+* npm-registry(1)
+* npm-folders(1)
diff --git a/deps/npm/doc/cli/owner.md b/deps/npm/doc/cli/owner.md
new file mode 100644
index 0000000000..8365da379e
--- /dev/null
+++ b/deps/npm/doc/cli/owner.md
@@ -0,0 +1,32 @@
+npm-owner(1) -- Manage package owners
+=====================================
+
+## SYNOPSIS
+
+ npm owner ls <package name>
+ npm owner add <user> <package name>
+ npm owner rm <user> <package name>
+
+## DESCRIPTION
+
+Manage ownership of published packages.
+
+* ls:
+ List all the users who have access to modify a package and push new versions.
+ Handy when you need to know who to bug for help.
+* add:
+ Add a new user as a maintainer of a package. This user is enabled to modify
+ metadata, publish new versions, and add other owners.
+* rm:
+ Remove a user from the package owner list. This immediately revokes their
+ privileges.
+
+Note that there is only one level of access. Either you can modify a package,
+or you can't. Future versions may contain more fine-grained access levels, but
+that is not implemented at this time.
+
+## SEE ALSO
+
+* npm-publish(1)
+* npm-registry(1)
+* npm-adduser(1)
diff --git a/deps/npm/doc/cli/pack.md b/deps/npm/doc/cli/pack.md
new file mode 100644
index 0000000000..98d8c81c50
--- /dev/null
+++ b/deps/npm/doc/cli/pack.md
@@ -0,0 +1,25 @@
+npm-pack(1) -- Create a tarball from a package
+==============================================
+
+## SYNOPSIS
+
+ npm pack [<pkg> [<pkg> ...]]
+
+## DESCRIPTION
+
+For anything that's installable (that is, a package folder, tarball,
+tarball url, name@tag, name@version, or name), this command will fetch
+it to the cache, and then copy the tarball to the current working
+directory as `<name>-<version>.tgz`, and then write the filenames out to
+stdout.
+
+If the same package is specified multiple times, then the file will be
+overwritten the second time.
+
+If no arguments are supplied, then npm packs the current package folder.
+
+## SEE ALSO
+
+* npm-cache(1)
+* npm-publish(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/prefix.md b/deps/npm/doc/cli/prefix.md
new file mode 100644
index 0000000000..f6247cab12
--- /dev/null
+++ b/deps/npm/doc/cli/prefix.md
@@ -0,0 +1,17 @@
+npm-prefix(1) -- Display prefix
+===============================
+
+## SYNOPSIS
+
+ npm prefix
+
+## DESCRIPTION
+
+Print the prefix to standard out.
+
+## SEE ALSO
+
+* npm-root(1)
+* npm-bin(1)
+* npm-folders(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/prune.md b/deps/npm/doc/cli/prune.md
new file mode 100644
index 0000000000..8c4b957e6c
--- /dev/null
+++ b/deps/npm/doc/cli/prune.md
@@ -0,0 +1,21 @@
+npm-prune(1) -- Remove extraneous packages
+==========================================
+
+## SYNOPSIS
+
+    npm prune [<name> [<name> ...]]
+
+## DESCRIPTION
+
+This command removes "extraneous" packages. If a package name is
+provided, then only packages matching one of the supplied names are
+removed.
+
+Extraneous packages are packages that are not listed on the parent
+package's dependencies list.
+
+## SEE ALSO
+
+* npm-rm(1)
+* npm-folders(1)
+* npm-list(1)
diff --git a/deps/npm/doc/cli/publish.md b/deps/npm/doc/cli/publish.md
new file mode 100644
index 0000000000..621932f07c
--- /dev/null
+++ b/deps/npm/doc/cli/publish.md
@@ -0,0 +1,30 @@
+npm-publish(1) -- Publish a package
+===================================
+
+
+## SYNOPSIS
+
+ npm publish <tarball>
+ npm publish <folder>
+
+## DESCRIPTION
+
+Publishes a package to the registry so that it can be installed by name.
+
+* `<folder>`:
+ A folder containing a package.json file
+
+* `<tarball>`:
+ A url or file path to a gzipped tar archive containing a single folder
+ with a package.json file inside.
+
+Fails if the package name and version combination already exists in
+the registry. Overwrites when the "--force" flag is set.
+
+## SEE ALSO
+
+* npm-registry(1)
+* npm-adduser(1)
+* npm-owner(1)
+* npm-deprecate(1)
+* npm-tag(1)
diff --git a/deps/npm/doc/cli/rebuild.md b/deps/npm/doc/cli/rebuild.md
new file mode 100644
index 0000000000..6985a7bdd7
--- /dev/null
+++ b/deps/npm/doc/cli/rebuild.md
@@ -0,0 +1,20 @@
+npm-rebuild(1) -- Rebuild a package
+===================================
+
+## SYNOPSIS
+
+ npm rebuild [<name> [<name> ...]]
+
+* `<name>`:
+ The package to rebuild
+
+## DESCRIPTION
+
+This command runs the `npm build` command on the matched folders. This is useful
+when you install a new version of node, and must recompile all your C++ addons with
+the new binary.
+
+## SEE ALSO
+
+* npm-build(1)
+* npm-install(1)
diff --git a/deps/npm/doc/cli/registry.md b/deps/npm/doc/cli/registry.md
new file mode 100644
index 0000000000..8073ea0820
--- /dev/null
+++ b/deps/npm/doc/cli/registry.md
@@ -0,0 +1,92 @@
+npm-registry(1) -- The JavaScript Package Registry
+==================================================
+
+## DESCRIPTION
+
+To resolve packages by name and version, npm talks to a registry website
+that implements the CommonJS Package Registry specification for reading
+package info.
+
+Additionally, npm's package registry implementation supports several
+write APIs as well, to allow for publishing packages and managing user
+account information.
+
+The official public npm registry is at <http://registry.npmjs.org/>. It
+is powered by a CouchDB database at
+<http://isaacs.couchone.com/registry>. The code for the couchapp is
+available at <http://github.com/isaacs/npmjs.org>. npm user accounts
+are CouchDB users, stored in the <http://isaacs.couchone.com/_users>
+database.
+
+The registry URL is supplied by the `registry` config parameter. See
+`npm-config(1)` for more on managing npm's configuration.
+
+## Can I run my own private registry?
+
+Yes!
+
+The easiest way is to replicate the couch database, and use the same (or
+similar) design doc to implement the APIs.
+
+If you set up continuous replication from the official CouchDB, and then
+set your internal CouchDB as the registry config, then you'll be able
+to read any published packages, in addition to your private ones, and by
+default will only publish internally. If you then want to publish a
+package for the whole world to see, you can simply override the
+`--registry` config for that command.
+
+## I don't want my package published in the official registry. It's private.
+
+Set `"private": true` in your package.json to prevent it from being
+published at all, or
+`"publishConfig":{"registry":"http://my-internal-registry.local"}`
+to force it to be published only to your internal registry.
+
+See `npm-json(1)` for more info on what goes in the package.json file.
+
+## Will you replicate from my registry into the public one?
+
+No. If you want things to be public, then publish them into the public
+registry using npm. What little security there is would be for nought
+otherwise.
+
+## Do I have to use couchdb to build a registry that npm can talk to?
+
+No, but it's way easier.
+
+## I published something elsewhere, and want to tell the npm registry about it.
+
+That is supported, but not using the npm client. You'll have to get
+your hands dirty and do some HTTP. The request looks something like
+this:
+
+ PUT /my-foreign-package
+ content-type:application/json
+ accept:application/json
+ authorization:Basic $base_64_encoded
+
+ { "name":"my-foreign-package"
+ , "maintainers":["owner","usernames"]
+ , "description":"A package that is hosted elsewhere"
+ , "keywords":["nih","my cheese smells the best"]
+ , "url":"http://my-different-registry.com/blerg/my-local-package"
+ }
+
+(Keywords and description are optional, but recommended. Name,
+maintainers, and url are required.)
+
+Then, when a user tries to install "my-foreign-package", it'll redirect
+to your registry. If that doesn't resolve to a valid package entry,
+then it'll fail, so please make sure that you understand the spec, and
+ask for help on the <npm-@googlegroups.com> mailing list.
+
+## Is there a website or something to see package docs and such?
+
+No, but such a thing is planned, and a tiny bit developed.
+
+Stay tuned!
+
+## SEE ALSO
+
+* npm-config(1)
+* npm-developers(1)
diff --git a/deps/npm/doc/cli/removing-npm.md b/deps/npm/doc/cli/removing-npm.md
new file mode 100644
index 0000000000..bedd28a2fa
--- /dev/null
+++ b/deps/npm/doc/cli/removing-npm.md
@@ -0,0 +1,54 @@
+npm-removal(1) -- Cleaning the Slate
+====================================
+
+## SYNOPSIS
+
+So sad to see you go.
+
+ sudo npm uninstall npm -g
+
+Or, if that fails, get the npm source code, and do:
+
+ sudo make uninstall
+
+## More Severe Uninstalling
+
+Usually, the above instructions are sufficient. That will remove
+npm, but leave behind anything you've installed.
+
+If that doesn't work, or if you require more drastic measures,
+continue reading.
+
+Note that this is only necessary for globally-installed packages. Local
+installs are completely contained within a project's `node_modules`
+folder. Delete that folder, and everything is gone (unless a package's
+install script is particularly ill-behaved).
+
+This assumes that you installed node and npm in the default place. If
+you configured node with a different `--prefix`, or installed npm with a
+different prefix setting, then adjust the paths accordingly, replacing
+`/usr/local` with your install prefix.
+
+To remove everything npm-related manually:
+
+ rm -rf /usr/local/{lib/node{,/.npm,_modules},bin,share/man}/npm*
+
+If you installed things *with* npm, then your best bet is to uninstall
+them with npm first, and then install them again once you have a
+proper install. This can help find any symlinks that are lying
+around:
+
+ ls -laF /usr/local/{lib/node{,/.npm},bin,share/man} | grep npm
+
+Prior to version 0.3, npm used shim files for executables and node
+modules. To track those down, you can do the following:
+
+ find /usr/local/{lib/node,bin} -exec grep -l npm \{\} \; ;
+
+(This is also in the README file.)
+
+## SEE ALSO
+
+* README
+* npm-rm(1)
+* npm-prune(1)
diff --git a/deps/npm/doc/cli/restart.md b/deps/npm/doc/cli/restart.md
new file mode 100644
index 0000000000..6139dbeefb
--- /dev/null
+++ b/deps/npm/doc/cli/restart.md
@@ -0,0 +1,22 @@
+npm-restart(1) -- Restart a package
+=================================
+
+## SYNOPSIS
+
+ npm restart <name>
+
+## DESCRIPTION
+
+This runs a package's "restart" script, if one was provided.
+Otherwise it runs the package's "stop" script, if one was provided, and then
+the "start" script.
+
+If no version is specified, then it restarts the "active" version.
+
+## SEE ALSO
+
+* npm-run-script(1)
+* npm-scripts(1)
+* npm-test(1)
+* npm-start(1)
+* npm-stop(1)
diff --git a/deps/npm/doc/cli/rm.md b/deps/npm/doc/cli/rm.md
new file mode 120000
index 0000000000..32d3b511f9
--- /dev/null
+++ b/deps/npm/doc/cli/rm.md
@@ -0,0 +1 @@
+uninstall.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/root.md b/deps/npm/doc/cli/root.md
new file mode 100644
index 0000000000..3e4199541e
--- /dev/null
+++ b/deps/npm/doc/cli/root.md
@@ -0,0 +1,17 @@
+npm-root(1) -- Display npm root
+===============================
+
+## SYNOPSIS
+
+ npm root
+
+## DESCRIPTION
+
+Print the effective `node_modules` folder to standard out.
+
+## SEE ALSO
+
+* npm-prefix(1)
+* npm-bin(1)
+* npm-folders(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/run-script.md b/deps/npm/doc/cli/run-script.md
new file mode 100644
index 0000000000..41ef5e7872
--- /dev/null
+++ b/deps/npm/doc/cli/run-script.md
@@ -0,0 +1,21 @@
+npm-run-script(1) -- Run arbitrary package scripts
+==================================================
+
+## SYNOPSIS
+
+ npm run-script <script> <name>
+
+## DESCRIPTION
+
+This runs an arbitrary command from a package's "scripts" object.
+
+It is used by the test, start, restart, and stop commands, but can be
+called directly, as well.
+
+## SEE ALSO
+
+* npm-scripts(1)
+* npm-test(1)
+* npm-start(1)
+* npm-restart(1)
+* npm-stop(1)
diff --git a/deps/npm/doc/cli/scripts.md b/deps/npm/doc/cli/scripts.md
new file mode 100644
index 0000000000..64b3ec41a0
--- /dev/null
+++ b/deps/npm/doc/cli/scripts.md
@@ -0,0 +1,182 @@
+npm-scripts(1) -- How npm handles the "scripts" field
+=====================================================
+
+## DESCRIPTION
+
+npm supports the "scripts" member of the package.json file, for the
+following scripts:
+
+* preinstall:
+ Run BEFORE the package is installed
+* install, postinstall:
+ Run AFTER the package is installed.
+* preuninstall, uninstall:
+ Run BEFORE the package is uninstalled.
+* postuninstall:
+ Run AFTER the package is uninstalled.
+* preupdate:
+ Run BEFORE the package is updated with the update command.
+* update, postupdate:
+ Run AFTER the package is updated with the update command.
+* prepublish:
+ Run BEFORE the package is published.
+* publish, postpublish:
+ Run AFTER the package is published.
+* pretest, test, posttest:
+ Run by the `npm test` command.
+* prestop, stop, poststop:
+ Run by the `npm stop` command.
+* prestart, start, poststart:
+ Run by the `npm start` command.
+* prerestart, restart, postrestart:
+ Run by the `npm restart` command. Note: `npm restart` will run the
+ stop and start scripts if no `restart` script is provided.
+
+Additionally, arbitrary scripts can be run by doing
+`npm run-script <stage> <pkg>`.
+
+## DEFAULT VALUES
+
+npm will default some script values based on package contents.
+
+* `"start": "node server.js"`:
+
+ If there is a `server.js` file in the root of your package, then npm
+ will default the `start` command to `node server.js`.
+
+* `"preinstall": "node-waf clean || true; node-waf configure build"`:
+
+ If there is a `wscript` file in the root of your package, npm will
+ default the `preinstall` command to compile using node-waf.
+
+## USER
+
+If npm was invoked with root privileges, then it will change the uid to
+the user account or uid specified by the `user` config, which defaults
+to `nobody`. Set the `unsafe-perm` flag to run scripts with root
+privileges.
+
+## ENVIRONMENT
+
+Package scripts run in an environment where many pieces of information are
+made available regarding the setup of npm and the current state of the
+process.
+
+### package.json vars
+
+The package.json fields are tacked onto the `npm_package_` prefix. So, for
+instance, if you had `{"name":"foo", "version":"1.2.5"}` in your package.json
+file, then your package scripts would have the `npm_package_name` environment
+variable set to "foo", and the `npm_package_version` set to "1.2.5"
+
+### configuration
+
+Configuration parameters are put in the environment with the `npm_config_`
+prefix. For instance, you can view the effective `root` config by checking the
+`npm_config_root` environment variable.
+
+### Special: package.json "config" hash
+
+The package.json "config" keys are overwritten in the environment if
+there is a config param of `<name>[@<version>]:<key>`. For example, if
+the package.json has this:
+
+ { "name" : "foo"
+ , "config" : { "port" : "8080" }
+ , "scripts" : { "start" : "node server.js" } }
+
+and the server.js is this:
+
+ http.createServer(...).listen(process.env.npm_package_config_port)
+
+then the user could change the behavior by doing:
+
+ npm config set foo:port 80
+
+### current lifecycle event
+
+Lastly, the `npm_lifecycle_event` environment variable is set to whichever
+stage of the cycle is being executed. So, you could have a single script used
+for different parts of the process which switches based on what's currently
+happening.
+
+
+Objects are flattened following this format, so if you had
+`{"scripts":{"install":"foo.js"}}` in your package.json, then you'd see this
+in the script:
+
+ process.env.npm_package_scripts_install === "foo.js"
+
+## EXAMPLES
+
+For example, if your package.json contains this:
+
+ { "scripts" :
+ { "install" : "scripts/install.js"
+ , "postinstall" : "scripts/install.js"
+ , "uninstall" : "scripts/uninstall.js"
+ }
+ }
+
+then the `scripts/install.js` will be called for the install, post-install,
+stages of the lifecycle, and the `scripts/uninstall.js` would be
+called when the package is uninstalled. Since `scripts/install.js` is running
+for three different phases, it would be wise in this case to look at the
+`npm_lifecycle_event` environment variable.
+
+If you want to run a make command, you can do so. This works just fine:
+
+ { "scripts" :
+ { "preinstall" : "./configure"
+ , "install" : "make && make install"
+ , "test" : "make test"
+ }
+ }
+
+## EXITING
+
+Scripts are run by passing the line as a script argument to `sh`.
+
+If the script exits with a code other than 0, then this will abort the
+process.
+
+Note that these script files don't have to be nodejs or even javascript
+programs. They just have to be some kind of executable file.
+
+## HOOK SCRIPTS
+
+If you want to run a specific script at a specific lifecycle event for ALL
+packages, then you can use a hook script.
+
+Place an executable file at `node_modules/.hooks/{eventname}`, and it'll get
+run for all packages when they are going through that point in the package
+lifecycle for any packages installed in that root.
+
+Hook scripts are run exactly the same way as package.json scripts. That is,
+they are in a separate child process, with the env described above.
+
+## BEST PRACTICES
+
+* Don't exit with a non-zero error code unless you *really* mean it.
+ Except for uninstall scripts, this will cause the npm action
+ to fail, and potentially be rolled back. If the failure is minor or
+ only will prevent some optional features, then it's better to just
+ print a warning and exit successfully.
+* Try not to use scripts to do what npm can do for you. Read through
+ `npm-json(1)` to see all the things that you can specify and enable
+ by simply describing your package appropriately. In general, this will
+ lead to a more robust and consistent state.
+* Inspect the env to determine where to put things. For instance, if
+ the `npm_config_binroot` environ is set to `/home/user/bin`, then don't
+ try to install executables into `/usr/local/bin`. The user probably
+ set it up that way for a reason.
+* Don't prefix your script commands with "sudo". If root permissions are
+ required for some reason, then it'll fail with that error, and the user
+ will sudo the npm command in question.
+
+## SEE ALSO
+
+* npm-run-script(1)
+* npm-json(1)
+* npm-developers(1)
+* npm-install(1)
diff --git a/deps/npm/doc/cli/search.md b/deps/npm/doc/cli/search.md
new file mode 100644
index 0000000000..3b15e9b073
--- /dev/null
+++ b/deps/npm/doc/cli/search.md
@@ -0,0 +1,39 @@
+npm-search(1) -- Search for packages
+====================================
+
+## SYNOPSIS
+
+ npm search [search terms ...]
+
+## DESCRIPTION
+
+Search the registry for packages matching the search terms.
+
+## CONFIGURATION
+
+### description
+
+* Default: true
+* Type: Boolean
+
+Show the description in `npm search`
+
+### searchopts
+
+* Default: ""
+* Type: String
+
+Space-separated options that are always passed to search.
+
+### searchexclude
+
+* Default: ""
+* Type: String
+
+Space-separated options that limit the results from search.
+
+## SEE ALSO
+
+* npm-registry(1)
+* npm-config(1)
+* npm-view(1)
diff --git a/deps/npm/doc/cli/semver.md b/deps/npm/doc/cli/semver.md
new file mode 100644
index 0000000000..7eb2240639
--- /dev/null
+++ b/deps/npm/doc/cli/semver.md
@@ -0,0 +1,130 @@
+npm-semver(1) -- The semantic versioner for npm
+===============================================
+
+## SYNOPSIS
+
+The npm semantic versioning utility.
+
+## DESCRIPTION
+
+As a node module:
+
+ $ npm install semver
+
+ semver.valid('1.2.3') // true
+ semver.valid('a.b.c') // false
+ semver.clean(' =v1.2.3 ') // '1.2.3'
+ semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+ semver.gt('1.2.3', '9.8.7') // false
+ semver.lt('1.2.3', '9.8.7') // true
+
+As a command-line utility:
+
+ $ npm install semver -g
+ $ semver -h
+
+ Usage: semver -v <version> [-r <range>]
+ Test if version(s) satisfy the supplied range(s),
+ and sort them.
+
+ Multiple versions or ranges may be supplied.
+
+ Program exits successfully if any valid version satisfies
+ all supplied ranges, and prints all satisfying versions.
+
+ If no versions are valid, or ranges are not satisfied,
+ then exits failure.
+
+ Versions are printed in ascending order, so supplying
+ multiple versions to the utility will just sort them.
+
+## Versions
+
+A version is the following things, in this order:
+
+* a number (Major)
+* a period
+* a number (minor)
+* a period
+* a number (patch)
+* OPTIONAL: a hyphen, followed by a number (build)
+* OPTIONAL: a collection of pretty much any non-whitespace characters
+ (tag)
+
+A leading `"="` or `"v"` character is stripped off and ignored.
+
+## Comparisons
+
+The ordering of versions is done using the following algorithm, given
+two versions and asked to find the greater of the two:
+
+* If the majors are numerically different, then take the one
+ with a bigger major number. `2.3.4 > 1.3.4`
+* If the minors are numerically different, then take the one
+ with the bigger minor number. `2.3.4 > 2.2.4`
+* If the patches are numerically different, then take the one with the
+ bigger patch number. `2.3.4 > 2.3.3`
+* If only one of them has a build number, then take the one with the
+ build number. `2.3.4-0 > 2.3.4`
+* If they both have build numbers, and the build numbers are numerically
+ different, then take the one with the bigger build number.
+ `2.3.4-10 > 2.3.4-9`
+* If only one of them has a tag, then take the one without the tag.
+ `2.3.4 > 2.3.4-beta`
+* If they both have tags, then take the one with the lexicographically
+ larger tag. `2.3.4-beta > 2.3.4-alpha`
+* At this point, they're equal.
+
+## Ranges
+
+The following range styles are supported:
+
+* `>1.2.3` Greater than a specific version.
+* `<1.2.3` Less than
+* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
+* `~1.2.3` := `>=1.2.3 <1.3.0`
+* `~1.2` := `>=1.2.0 <2.0.0`
+* `~1` := `>=1.0.0 <2.0.0`
+* `1.2.x` := `>=1.2.0 <1.3.0`
+* `1.x` := `>=1.0.0 <2.0.0`
+
+Ranges can be joined with either a space (which implies "and") or a
+`||` (which implies "or").
+
+## Functions
+
+* valid(v): Return the parsed version, or null if it's not valid.
+* inc(v, release): Return the version incremented by the release type
+ (major, minor, patch, or build), or null if it's not valid.
+
+### Comparison
+
+* gt(v1, v2): `v1 > v2`
+* gte(v1, v2): `v1 >= v2`
+* lt(v1, v2): `v1 < v2`
+* lte(v1, v2): `v1 <= v2`
+* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent,
+ even if they're not the exact same string. You already know how to
+ compare strings.
+* neq(v1, v2): `v1 != v2` The opposite of eq.
+* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call
+ the corresponding function above. `"==="` and `"!=="` do simple
+ string comparison, but are included for completeness. Throws if an
+ invalid comparison string is provided.
+* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if
+ v2 is greater. Sorts in ascending order if passed to Array.sort().
+* rcompare(v1, v2): The reverse of compare. Sorts an array of versions
+ in descending order when passed to Array.sort().
+
+
+### Ranges
+
+* validRange(range): Return the valid range or null if it's not valid
+* satisfies(version, range): Return true if the version satisfies the
+ range.
+* maxSatisfying(versions, range): Return the highest version in the list
+ that satisfies the range, or null if none of them do.
+
+## SEE ALSO
+
+* npm-json(1)
diff --git a/deps/npm/doc/cli/set.md b/deps/npm/doc/cli/set.md
new file mode 120000
index 0000000000..3dc8737366
--- /dev/null
+++ b/deps/npm/doc/cli/set.md
@@ -0,0 +1 @@
+config.md \ No newline at end of file
diff --git a/deps/npm/doc/cli/star.md b/deps/npm/doc/cli/star.md
new file mode 100644
index 0000000000..5c076b3c3c
--- /dev/null
+++ b/deps/npm/doc/cli/star.md
@@ -0,0 +1,22 @@
+npm-star(1) -- Mark your favorite packages
+==========================================
+
+## SYNOPSIS
+
+ npm star <pkgname> [<pkg>, ...]
+ npm unstar <pkgname> [<pkg>, ...]
+
+## DESCRIPTION
+
+"Starring" a package means that you have some interest in it. It's
+a vaguely positive way to show that you care.
+
+"Unstarring" is the same thing, but in reverse.
+
+It's a boolean thing. Starring repeatedly has no additional effect.
+
+## SEE ALSO
+
+* npm-view(1)
+* npm-whoami(1)
+* npm-adduser(1)
diff --git a/deps/npm/doc/cli/start.md b/deps/npm/doc/cli/start.md
new file mode 100644
index 0000000000..cc897bbc09
--- /dev/null
+++ b/deps/npm/doc/cli/start.md
@@ -0,0 +1,18 @@
+npm-start(1) -- Start a package
+===============================
+
+## SYNOPSIS
+
+ npm start <name>
+
+## DESCRIPTION
+
+This runs a package's "start" script, if one was provided.
+
+## SEE ALSO
+
+* npm-run-script(1)
+* npm-scripts(1)
+* npm-test(1)
+* npm-restart(1)
+* npm-stop(1)
diff --git a/deps/npm/doc/cli/stop.md b/deps/npm/doc/cli/stop.md
new file mode 100644
index 0000000000..1ab3e9975d
--- /dev/null
+++ b/deps/npm/doc/cli/stop.md
@@ -0,0 +1,18 @@
+npm-stop(1) -- Stop a package
+=============================
+
+## SYNOPSIS
+
+ npm stop <name>
+
+## DESCRIPTION
+
+This runs a package's "stop" script, if one was provided.
+
+## SEE ALSO
+
+* npm-run-script(1)
+* npm-scripts(1)
+* npm-test(1)
+* npm-start(1)
+* npm-restart(1)
diff --git a/deps/npm/doc/cli/submodule.md b/deps/npm/doc/cli/submodule.md
new file mode 100644
index 0000000000..13ab1edd95
--- /dev/null
+++ b/deps/npm/doc/cli/submodule.md
@@ -0,0 +1,28 @@
+npm-submodule(1) -- Add a package as a git submodule
+====================================================
+
+## SYNOPSIS
+
+ npm submodule <pkg>
+
+## DESCRIPTION
+
+If the specified package has a git repository url in its package.json
+description, then this command will add it as a git submodule at
+`node_modules/<pkg name>`.
+
+This is a convenience only. From then on, it's up to you to manage
+updates by using the appropriate git commands. npm will stubbornly
+refuse to update, modify, or remove anything with a `.git` subfolder
+in it.
+
+This command also does not install missing dependencies, if the package
+does not include them in its git repository. If `npm ls` reports that
+things are missing, you can either install, link, or submodule them yourself,
+or you can do `npm explore <pkgname> -- npm install` to install the
+dependencies into the submodule folder.
+
+## SEE ALSO
+
+* npm-json(1)
+* git help submodule
diff --git a/deps/npm/doc/cli/tag.md b/deps/npm/doc/cli/tag.md
new file mode 100644
index 0000000000..2f1ca4373e
--- /dev/null
+++ b/deps/npm/doc/cli/tag.md
@@ -0,0 +1,17 @@
+npm-tag(1) -- Tag a published version
+=====================================
+
+## SYNOPSIS
+
+ npm tag <name>@<version> [<tag>]
+
+## DESCRIPTION
+
+Tags the specified version of the package with the specified tag, or the
+`--tag` config if not specified.
+
+## SEE ALSO
+
+* npm-publish(1)
+* npm-registry(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/test.md b/deps/npm/doc/cli/test.md
new file mode 100644
index 0000000000..bc634efbf3
--- /dev/null
+++ b/deps/npm/doc/cli/test.md
@@ -0,0 +1,21 @@
+npm-test(1) -- Test a package
+=============================
+
+## SYNOPSIS
+
+ npm test <name>
+
+## DESCRIPTION
+
+This runs a package's "test" script, if one was provided.
+
+To run tests as a condition of installation, set the `npat` config to
+true.
+
+## SEE ALSO
+
+* npm-run-script(1)
+* npm-scripts(1)
+* npm-start(1)
+* npm-restart(1)
+* npm-stop(1)
diff --git a/deps/npm/doc/cli/uninstall.md b/deps/npm/doc/cli/uninstall.md
new file mode 100644
index 0000000000..f7f743fae2
--- /dev/null
+++ b/deps/npm/doc/cli/uninstall.md
@@ -0,0 +1,19 @@
+npm-rm(1) -- Remove a package
+=============================
+
+## SYNOPSIS
+
+ npm rm <name>
+ npm uninstall <name>
+
+## DESCRIPTION
+
+This uninstalls a package, completely removing everything npm installed
+on its behalf.
+
+## SEE ALSO
+
+* npm-prune(1)
+* npm-install(1)
+* npm-folders(1)
+* npm-config(1)
diff --git a/deps/npm/doc/cli/unpublish.md b/deps/npm/doc/cli/unpublish.md
new file mode 100644
index 0000000000..0f4446c4ed
--- /dev/null
+++ b/deps/npm/doc/cli/unpublish.md
@@ -0,0 +1,32 @@
+npm-unpublish(1) -- Remove a package from the registry
+======================================================
+
+## SYNOPSIS
+
+ npm unpublish <name>[@<version>]
+
+## WARNING
+
+**It is generally considered bad behavior to remove versions of a library
+that others are depending on!**
+
+Consider using the `deprecate` command
+instead, if your intent is to encourage users to upgrade.
+
+There is plenty of room on the registry.
+
+## DESCRIPTION
+
+This removes a package version from the registry, deleting its
+entry and removing the tarball.
+
+If no version is specified, or if all versions are removed then
+the root package entry is removed from the registry entirely.
+
+## SEE ALSO
+
+* npm-deprecate(1)
+* npm-publish(1)
+* npm-registry(1)
+* npm-adduser(1)
+* npm-owner(1)
diff --git a/deps/npm/doc/cli/update.md b/deps/npm/doc/cli/update.md
new file mode 100644
index 0000000000..1de49f2e2f
--- /dev/null
+++ b/deps/npm/doc/cli/update.md
@@ -0,0 +1,21 @@
+npm-update(1) -- Update a package
+=================================
+
+## SYNOPSIS
+
+ npm update [<name> [<name> ...]]
+
+## DESCRIPTION
+
+This command will update all the packages listed to the latest version
+(specified by the `tag` config).
+
+It will also install missing packages.
+
+## SEE ALSO
+
+* npm-install(1)
+* npm-outdated(1)
+* npm-registry(1)
+* npm-folders(1)
+* npm-list(1)
diff --git a/deps/npm/doc/cli/version.md b/deps/npm/doc/cli/version.md
new file mode 100644
index 0000000000..480c90d3b4
--- /dev/null
+++ b/deps/npm/doc/cli/version.md
@@ -0,0 +1,27 @@
+npm-version(1) -- Bump a package version
+========================================
+
+## SYNOPSIS
+
+ npm version <newversion> [--message commit-message]
+
+## DESCRIPTION
+
+Run this in a package directory to bump the version and write the new
+data back to the package.json file.
+
+The `newversion` argument should be a valid semver string, *or* a valid
+second argument to semver.inc (one of "patch", "minor", or "major"). In
+the second case, the existing version will be incremented by that amount.
+
+If run in a git repo, it will also create a version commit and tag, and
+fail if the repo is not clean.
+
+If supplied with `--message` (shorthand: `-m`) command line option, npm
+will use it as a commit message when creating a version commit.
+
+## SEE ALSO
+
+* npm-init(1)
+* npm-json(1)
+* npm-semver(1)
diff --git a/deps/npm/doc/cli/view.md b/deps/npm/doc/cli/view.md
new file mode 100644
index 0000000000..5ec9dc0aca
--- /dev/null
+++ b/deps/npm/doc/cli/view.md
@@ -0,0 +1,85 @@
+npm-view(1) -- View registry info
+=================================
+
+## SYNOPSIS
+
+ npm view <name>[@<version>] [<field>[.<subfield>]...]
+
+## DESCRIPTION
+
+This command shows data about a package and prints it to the stream
+referenced by the `outfd` config, which defaults to stdout.
+
+To show the package registry entry for the `connect` package, you can do
+this:
+
+ npm view connect
+
+The default version is "latest" if unspecified.
+
+Field names can be specified after the package descriptor.
+For example, to show the dependencies of the `ronn` package at version
+0.3.5, you could do the following:
+
+ npm view ronn@0.3.5 dependencies
+
+You can view child fields by separating them with a period.
+To view the git repository URL for the latest version of npm, you could
+do this:
+
+ npm view npm repository.url
+
+This makes it easy to view information about a dependency with a bit of
+shell scripting. For example, to view all the data about the version of
+opts that ronn depends on, you can do this:
+
+ npm view opts@$(npm view ronn dependencies.opts)
+
+For fields that are arrays, requesting a non-numeric field will return
+all of the values from the objects in the list. For example, to get all
+the contributor names for the "express" project, you can do this:
+
+ npm view express contributors.email
+
+You may also use numeric indices in square braces to specifically select
+an item in an array field. To just get the email address of the first
+contributor in the list, you can do this:
+
+ npm view express contributors[0].email
+
+Multiple fields may be specified, and will be printed one after another.
+For example, to get all the contributor names and email addresses, you
+can do this:
+
+ npm view express contributors.name contributors.email
+
+"Person" fields are shown as a string if they would be shown as an
+object. So, for example, this will show the list of npm contributors in
+the shortened string format. (See `npm-json(1)` for more on this.)
+
+ npm view npm contributors
+
+If a version range is provided, then data will be printed for every
+matching version of the package. This will show which version of jsdom
+was required by each matching version of yui3:
+
+ npm view yui3@'>0.5.4' dependencies.jsdom
+
+## OUTPUT
+
+If only a single string field for a single version is output, then it
+will not be colorized or quoted, so as to enable piping the output to
+another command.
+
+If the version range matches multiple versions, then each printed value
+will be prefixed with the version it applies to.
+
+If multiple fields are requested, then each of them is prefixed with
+the field name.
+
+## SEE ALSO
+
+* npm-search(1)
+* npm-registry(1)
+* npm-config(1)
+* npm-docs(1)
diff --git a/deps/npm/doc/cli/whoami.md b/deps/npm/doc/cli/whoami.md
new file mode 100644
index 0000000000..7c39b1624a
--- /dev/null
+++ b/deps/npm/doc/cli/whoami.md
@@ -0,0 +1,15 @@
+npm-whoami(1) -- Display npm username
+=====================================
+
+## SYNOPSIS
+
+ npm whoami
+
+## DESCRIPTION
+
+Print the `username` config to standard output.
+
+## SEE ALSO
+
+* npm-config(1)
+* npm-adduser(1)
diff --git a/deps/npm/html/api/GubbleBum-Blocky.ttf b/deps/npm/html/api/GubbleBum-Blocky.ttf
new file mode 100755
index 0000000000..8eac02f7ad
--- /dev/null
+++ b/deps/npm/html/api/GubbleBum-Blocky.ttf
Binary files differ
diff --git a/deps/npm/html/api/style.css b/deps/npm/html/api/style.css
new file mode 100644
index 0000000000..220d162d67
--- /dev/null
+++ b/deps/npm/html/api/style.css
@@ -0,0 +1,336 @@
+
+/* reset */
+* {
+ margin:0;
+ padding:0;
+ border:none;
+ font-family:inherit;
+ font-size:inherit;
+ font-weight:inherit;
+}
+:target::before {
+ content:" >>> ";
+ position:absolute;
+ display:block;
+ opacity:0.5;
+ color:#f00;
+ margin:0 0 0 -2em;
+}
+abbr, acronym {
+ border-bottom:1px dotted #aaa;
+}
+kbd, code, pre {
+ font-family:monospace;
+ margin:0;
+ font-size:18px;
+ line-height:24px;
+ background:#eee;
+ outline:1px solid #ccc;
+}
+kbd code, kbd pre, kbd kbd,
+pre code, pre pre, pre kbd,
+code code, code pre, code kbd { outline: none }
+.dollar::before {
+ content:"$ ";
+ display:inline;
+}
+p, ul, ol, dl, pre {
+ margin:30px 0;
+ line-height:30px;
+}
+hr {
+ margin:30px auto 29px;
+ width:66%;
+ height:1px;
+ background:#aaa;
+}
+pre {
+ display:block;
+}
+dd :first-child {
+ margin-top:0;
+}
+
+body {
+ quotes:"“" "â€" "‘" "’";
+ width:666px;
+ margin:30px auto 120px;
+ font-family:Times New Roman, serif;
+ font-size:20px;
+ background:#fff;
+ line-height:30px;
+ color:#111;
+}
+
+blockquote {
+ position:relative;
+ font-size:16px;
+ line-height:30px;
+ font-weight:bold;
+ width:85%;
+ margin:0 auto;
+}
+blockquote::before {
+ font-size:90px;
+ display:block;
+ position:absolute;
+ top:20px;
+ right:100%;
+ content:"“";
+ padding-right:10px;
+ color:#ccc;
+}
+.source cite::before {
+ content:"— ";
+}
+.source {
+ padding-left:20%;
+ margin-top:30px;
+}
+.source cite span {
+ font-style:normal;
+}
+blockquote p {
+ margin-bottom:0;
+}
+.quote blockquote {
+ font-weight:normal;
+}
+
+h1, h2, h3, h4, h5, h6, dt, #header {
+ font-family:serif;
+ font-size:20px;
+ font-weight:bold;
+}
+h2 {
+ background:#eee;
+}
+h1, h2 {
+ line-height:40px;
+}
+
+i, em, cite {
+ font-style:italic;
+}
+b, strong {
+ font-weight:bold;
+}
+i, em, cite, b, strong, small {
+ line-height:28px;
+}
+small, .small, .small *, aside {
+ font-style:italic;
+ color:#669;
+ font-size:18px;
+}
+spall a, .small a {
+ text-decoration:underline;
+}
+del {
+ text-decoration:line-through;
+}
+ins {
+ text-decoration:underline;
+}
+.alignright { display:block; float:right; margin-left:1em; }
+.alignleft { display:block; float:left; margin-right:1em; }
+
+q:before, q q q:before, q q q q q:before, q q q q q q q:before { content:"“"; }
+q q:before, q q q q:before, q q q q q q:before, q q q q q q q q:before { content:"‘"; }
+q:after, q q q:after, q q q q q:after, q q q q q q q:after { content:"â€"; }
+q q:after, q q q q:after, q q q q q q:after, q q q q q q q q:after { content:"’"; }
+
+a { color:#00f; text-decoration:none; }
+a:visited { color:#636; }
+a:hover, a:active { color:#900!important; text-decoration:underline; }
+
+h1 {
+ font-weight:bold;
+ background:#fff;
+}
+h1 a, h1 a:visited {
+ font-family:gubblefont, GubbleBum Blocky, GubbleBum, monospace;
+ font-size:60px;
+ color:#900;
+ display:block;
+}
+h1 a:focus, h1 a:hover, h1 a:active {
+ color:#f00!important;
+ text-decoration:none;
+}
+
+.navigation {
+ display:table;
+ width:100%;
+ margin:0 0 30px 0;
+ position:relative;
+}
+#nav-above {
+ margin-bottom:0;
+}
+.navigation .nav-previous {
+ display:table-cell;
+ text-align:left;
+ width:50%;
+}
+/* hang the » and « off into the margins */
+.navigation .nav-previous a:before, .navigation .nav-next a:after {
+ content: "«";
+ display:block;
+ height:30px;
+ margin-bottom:-30px;
+ text-decoration:none;
+ margin-left:-15px;
+}
+.navigation .nav-next a:after {
+ content: "»";
+ text-align:right;
+ margin-left:0;
+ margin-top:-30px;
+ margin-right:-15px;
+}
+
+
+.navigation .nav-next {
+ display:table-cell;
+ text-align:right;
+ width:50%;
+}
+.navigation a {
+ display:block;
+ width:100%;
+ height:100%;
+}
+
+input, button, textarea {
+ border:0;
+ line-height:30px;
+}
+textarea {
+ height:300px;
+}
+input {
+ height:30px;
+ line-height:30px;
+}
+input.submit, input#submit, input.button, button, input[type=submit] {
+ cursor:hand; cursor:pointer;
+ outline:1px solid #ccc;
+}
+
+#wrapper {
+ margin-bottom:90px;
+ position:relative;
+ z-index:1;
+ *zoom:1;
+ background:#fff;
+}
+#wrapper:after {
+ display:block;
+ content:".";
+ visibility:hidden;
+ width:0;
+ height:0;
+ clear:both;
+}
+
+.sidebar .xoxo > li {
+ float:left;
+ width:50%;
+}
+.sidebar li {
+ list-style:none;
+}
+.sidebar #elsewhere {
+ margin-left:-10%;
+ margin-right:-10%;
+}
+.sidebar #rss-links, .sidebar #twitter-feeds {
+ float:right;
+ clear:right;
+ width:20%;
+}
+.sidebar #comment {
+ clear:both;
+ float:none;
+ width:100%;
+}
+.sidebar #search {
+ clear:both;
+ float:none;
+ width:100%;
+}
+.sidebar #search h2 {
+ margin-left:40%;
+}
+.sidebar #search #s {
+ width:90%;
+ float:left;
+}
+.sidebar #search #searchsubmit {
+ width:10%;
+ float:right;
+}
+.sidebar * {
+ font-size:15px;
+ line-height:30px;
+}
+
+#footer, #footer * {
+ text-align:right;
+ font-size:16px;
+ color:#ccc;
+ font-style:italic;
+ word-spacing:1em;
+}
+
+#toc {
+ position:absolute;
+ top:0;
+ right:0;
+ padding:40px 0 40px 20px;
+ margin:0;
+ width:200px;
+ opacity:0.2;
+ z-index:-1;
+}
+#toc:hover {
+ opacity:1;
+ background:#fff;
+ z-index:999;
+}
+#toc ul {
+ padding:0;
+ margin:0;
+}
+#toc, #toc li {
+ list-style-type:none;
+ font-size:15px;
+ line-height:15px;
+}
+#toc li {
+ padding:0 0 0 10px;
+}
+#toc li a {
+ position:relative;
+ display:block;
+}
+
+@font-face {
+ font-family:gubblefont;
+ src: url(./GubbleBum-Blocky.ttf) format("truetype");
+}
+
+@media print {
+ a[href] {
+ color:inherit;
+ }
+ a[href]:after {
+ white-space:nowrap;
+ content:" " attr(href);
+ }
+ a[href^=\#], .navigation {
+ display:none;
+ }
+}
+
diff --git a/deps/npm/html/doc/GubbleBum-Blocky.ttf b/deps/npm/html/doc/GubbleBum-Blocky.ttf
new file mode 100755
index 0000000000..8eac02f7ad
--- /dev/null
+++ b/deps/npm/html/doc/GubbleBum-Blocky.ttf
Binary files differ
diff --git a/deps/npm/html/doc/style.css b/deps/npm/html/doc/style.css
new file mode 100644
index 0000000000..220d162d67
--- /dev/null
+++ b/deps/npm/html/doc/style.css
@@ -0,0 +1,336 @@
+
+/* reset */
+* {
+ margin:0;
+ padding:0;
+ border:none;
+ font-family:inherit;
+ font-size:inherit;
+ font-weight:inherit;
+}
+:target::before {
+ content:" >>> ";
+ position:absolute;
+ display:block;
+ opacity:0.5;
+ color:#f00;
+ margin:0 0 0 -2em;
+}
+abbr, acronym {
+ border-bottom:1px dotted #aaa;
+}
+kbd, code, pre {
+ font-family:monospace;
+ margin:0;
+ font-size:18px;
+ line-height:24px;
+ background:#eee;
+ outline:1px solid #ccc;
+}
+kbd code, kbd pre, kbd kbd,
+pre code, pre pre, pre kbd,
+code code, code pre, code kbd { outline: none }
+.dollar::before {
+ content:"$ ";
+ display:inline;
+}
+p, ul, ol, dl, pre {
+ margin:30px 0;
+ line-height:30px;
+}
+hr {
+ margin:30px auto 29px;
+ width:66%;
+ height:1px;
+ background:#aaa;
+}
+pre {
+ display:block;
+}
+dd :first-child {
+ margin-top:0;
+}
+
+body {
+ quotes:"“" "â€" "‘" "’";
+ width:666px;
+ margin:30px auto 120px;
+ font-family:Times New Roman, serif;
+ font-size:20px;
+ background:#fff;
+ line-height:30px;
+ color:#111;
+}
+
+blockquote {
+ position:relative;
+ font-size:16px;
+ line-height:30px;
+ font-weight:bold;
+ width:85%;
+ margin:0 auto;
+}
+blockquote::before {
+ font-size:90px;
+ display:block;
+ position:absolute;
+ top:20px;
+ right:100%;
+ content:"“";
+ padding-right:10px;
+ color:#ccc;
+}
+.source cite::before {
+ content:"— ";
+}
+.source {
+ padding-left:20%;
+ margin-top:30px;
+}
+.source cite span {
+ font-style:normal;
+}
+blockquote p {
+ margin-bottom:0;
+}
+.quote blockquote {
+ font-weight:normal;
+}
+
+h1, h2, h3, h4, h5, h6, dt, #header {
+ font-family:serif;
+ font-size:20px;
+ font-weight:bold;
+}
+h2 {
+ background:#eee;
+}
+h1, h2 {
+ line-height:40px;
+}
+
+i, em, cite {
+ font-style:italic;
+}
+b, strong {
+ font-weight:bold;
+}
+i, em, cite, b, strong, small {
+ line-height:28px;
+}
+small, .small, .small *, aside {
+ font-style:italic;
+ color:#669;
+ font-size:18px;
+}
+spall a, .small a {
+ text-decoration:underline;
+}
+del {
+ text-decoration:line-through;
+}
+ins {
+ text-decoration:underline;
+}
+.alignright { display:block; float:right; margin-left:1em; }
+.alignleft { display:block; float:left; margin-right:1em; }
+
+q:before, q q q:before, q q q q q:before, q q q q q q q:before { content:"“"; }
+q q:before, q q q q:before, q q q q q q:before, q q q q q q q q:before { content:"‘"; }
+q:after, q q q:after, q q q q q:after, q q q q q q q:after { content:"â€"; }
+q q:after, q q q q:after, q q q q q q:after, q q q q q q q q:after { content:"’"; }
+
+a { color:#00f; text-decoration:none; }
+a:visited { color:#636; }
+a:hover, a:active { color:#900!important; text-decoration:underline; }
+
+h1 {
+ font-weight:bold;
+ background:#fff;
+}
+h1 a, h1 a:visited {
+ font-family:gubblefont, GubbleBum Blocky, GubbleBum, monospace;
+ font-size:60px;
+ color:#900;
+ display:block;
+}
+h1 a:focus, h1 a:hover, h1 a:active {
+ color:#f00!important;
+ text-decoration:none;
+}
+
+.navigation {
+ display:table;
+ width:100%;
+ margin:0 0 30px 0;
+ position:relative;
+}
+#nav-above {
+ margin-bottom:0;
+}
+.navigation .nav-previous {
+ display:table-cell;
+ text-align:left;
+ width:50%;
+}
+/* hang the » and « off into the margins */
+.navigation .nav-previous a:before, .navigation .nav-next a:after {
+ content: "«";
+ display:block;
+ height:30px;
+ margin-bottom:-30px;
+ text-decoration:none;
+ margin-left:-15px;
+}
+.navigation .nav-next a:after {
+ content: "»";
+ text-align:right;
+ margin-left:0;
+ margin-top:-30px;
+ margin-right:-15px;
+}
+
+
+.navigation .nav-next {
+ display:table-cell;
+ text-align:right;
+ width:50%;
+}
+.navigation a {
+ display:block;
+ width:100%;
+ height:100%;
+}
+
+input, button, textarea {
+ border:0;
+ line-height:30px;
+}
+textarea {
+ height:300px;
+}
+input {
+ height:30px;
+ line-height:30px;
+}
+input.submit, input#submit, input.button, button, input[type=submit] {
+ cursor:hand; cursor:pointer;
+ outline:1px solid #ccc;
+}
+
+#wrapper {
+ margin-bottom:90px;
+ position:relative;
+ z-index:1;
+ *zoom:1;
+ background:#fff;
+}
+#wrapper:after {
+ display:block;
+ content:".";
+ visibility:hidden;
+ width:0;
+ height:0;
+ clear:both;
+}
+
+.sidebar .xoxo > li {
+ float:left;
+ width:50%;
+}
+.sidebar li {
+ list-style:none;
+}
+.sidebar #elsewhere {
+ margin-left:-10%;
+ margin-right:-10%;
+}
+.sidebar #rss-links, .sidebar #twitter-feeds {
+ float:right;
+ clear:right;
+ width:20%;
+}
+.sidebar #comment {
+ clear:both;
+ float:none;
+ width:100%;
+}
+.sidebar #search {
+ clear:both;
+ float:none;
+ width:100%;
+}
+.sidebar #search h2 {
+ margin-left:40%;
+}
+.sidebar #search #s {
+ width:90%;
+ float:left;
+}
+.sidebar #search #searchsubmit {
+ width:10%;
+ float:right;
+}
+.sidebar * {
+ font-size:15px;
+ line-height:30px;
+}
+
+#footer, #footer * {
+ text-align:right;
+ font-size:16px;
+ color:#ccc;
+ font-style:italic;
+ word-spacing:1em;
+}
+
+#toc {
+ position:absolute;
+ top:0;
+ right:0;
+ padding:40px 0 40px 20px;
+ margin:0;
+ width:200px;
+ opacity:0.2;
+ z-index:-1;
+}
+#toc:hover {
+ opacity:1;
+ background:#fff;
+ z-index:999;
+}
+#toc ul {
+ padding:0;
+ margin:0;
+}
+#toc, #toc li {
+ list-style-type:none;
+ font-size:15px;
+ line-height:15px;
+}
+#toc li {
+ padding:0 0 0 10px;
+}
+#toc li a {
+ position:relative;
+ display:block;
+}
+
+@font-face {
+ font-family:gubblefont;
+ src: url(./GubbleBum-Blocky.ttf) format("truetype");
+}
+
+@media print {
+ a[href] {
+ color:inherit;
+ }
+ a[href]:after {
+ white-space:nowrap;
+ content:" " attr(href);
+ }
+ a[href^=\#], .navigation {
+ display:none;
+ }
+}
+
diff --git a/deps/npm/html/docfoot.html b/deps/npm/html/docfoot.html
new file mode 100644
index 0000000000..3e35341cc3
--- /dev/null
+++ b/deps/npm/html/docfoot.html
@@ -0,0 +1,34 @@
+</div>
+<p id="footer">@NAME@ &mdash; npm@@VERSION@</p>
+<script>
+;(function () {
+var wrapper = document.getElementById("wrapper")
+var els = Array.prototype.slice.call(wrapper.getElementsByTagName("*"), 0)
+ .filter(function (el) {
+ return el.parentNode === wrapper
+ && el.tagName.match(/H[1-6]/)
+ && el.id
+ })
+var l = 2
+ , toc = document.createElement("ul")
+toc.innerHTML = els.map(function (el) {
+ var i = el.tagName.charAt(1)
+ , out = ""
+ while (i > l) {
+ out += "<ul>"
+ l ++
+ }
+ while (i < l) {
+ out += "</ul>"
+ l --
+ }
+ out += "<li><a href='#" + el.id + "'>" +
+ ( el.innerText || el.text || el.innerHTML)
+ + "</a>"
+ return out
+}).join("\n")
+toc.id = "toc"
+document.body.appendChild(toc)
+})()
+</script>
+</body></html>
diff --git a/deps/npm/html/dochead.html b/deps/npm/html/dochead.html
new file mode 100644
index 0000000000..c96b0d733f
--- /dev/null
+++ b/deps/npm/html/dochead.html
@@ -0,0 +1,8 @@
+<!doctype html>
+<html>
+ <title>@NAME@</title>
+ <meta http-equiv="content-type" value="text/html;utf-8">
+ <link rel="stylesheet" type="text/css" href="./style.css">
+
+ <body>
+ <div id="wrapper">
diff --git a/deps/npm/html/favicon.ico b/deps/npm/html/favicon.ico
new file mode 100644
index 0000000000..9e0d4eef78
--- /dev/null
+++ b/deps/npm/html/favicon.ico
Binary files differ
diff --git a/deps/npm/html/index.html b/deps/npm/html/index.html
new file mode 100644
index 0000000000..9ccf28e57c
--- /dev/null
+++ b/deps/npm/html/index.html
@@ -0,0 +1,86 @@
+<!doctype html>
+
+<html>
+<head>
+<style>
+ html { background:#202050;
+ font-family:CentSchbook Mono BT, Bitstream Vera Sans Mono, monofont, monospace;
+ }
+body { background:#ddd; width:600px; border:10px solid #fff; margin:2em auto; padding:2em }
+h1 {
+ font-size:200px;
+ line-height:1;
+ font-family:"gubblebum-blocky", monospace;
+ color:#f00;
+ text-align:center;
+ padding:0;
+ margin:0 auto;
+ text-indent:-999em;
+ height:202px;
+ width:519px;
+ background:url(npm.png) center;
+}
+h2 {
+ color:#202050;
+ font-size:100%;
+}
+p, ul, ol { margin:1em 0 0; padding:0 }
+li { list-style-position:inside }
+a { color:#f00; text-decoration:none; }
+a:hover { text-decoration:underline; }
+code { background:#fff ; outline: 1px solid #ccc; padding:0 2px; }
+
+@font-face {
+ font-family:monofont;
+ src: url(http://foohack.com/tpl/fonts/Bitstream-Vera-Sans-Mono/VeraMono.ttf) format("truetype");
+}
+@font-face {
+ font-family:monofont;
+ font-style:italic;
+ src: url(http://foohack.com/tpl/fonts/Bitstream-Vera-Sans-Mono/VeraMoIt.ttf) format("truetype");
+}
+@font-face {
+ font-family:monofont;
+ font-weight:bold;
+ src: url(http://foohack.com/tpl/fonts/Bitstream-Vera-Sans-Mono/VeraMoBd.ttf) format("truetype");
+}
+@font-face {
+ font-family:monofont;
+ font-style:italic;
+ font-weight:bold;
+ src: url(http://foohack.com/tpl/fonts/Bitstream-Vera-Sans-Mono/VeraMoBI.ttf) format("truetype");
+}
+
+</style>
+ <title>npm - Node Package Manager</title>
+</head>
+<h1>npm</h1>
+
+<p>npm is a package manager for <a href="http://nodejs.org/">node</a>. You can use it to install
+ and publish your node programs. It manages dependencies and does other cool stuff.</p>
+
+<h2>One Line Install</h2>
+
+<code>curl http://npmjs.org/install.sh | sh</code>
+
+<h2>More Than One Line Install</h2>
+
+<ol>
+ <li><a href="https://github.com/isaacs/npm">Get the code.</a>
+ <li>Do what <a href="http://npmjs.org/doc/README.html">the README</a>
+ says to do.
+</ol>
+
+<h2>Other Cool Stuff</h2>
+
+<ul>
+ <li><a href="http://npmjs.org/doc/README.html">README</a>
+ <li><a href="doc/">Help Documentation</a>
+ <li><a href="doc/faq.html">FAQ</a>
+ <li><a href="http://search.npmjs.org/">Search for Packages</a>
+ <li><a href="http://groups.google.com/group/npm-">Mailing List</a>
+ <li><a href="https://github.com/isaacs/npm/issues">Bugs</a>
+</ul>
+
+</body>
+</html>
diff --git a/deps/npm/html/n-64.png b/deps/npm/html/n-64.png
new file mode 100644
index 0000000000..d4145efae6
--- /dev/null
+++ b/deps/npm/html/n-64.png
Binary files differ
diff --git a/deps/npm/html/n-large.png b/deps/npm/html/n-large.png
new file mode 100644
index 0000000000..9e1525f05a
--- /dev/null
+++ b/deps/npm/html/n-large.png
Binary files differ
diff --git a/deps/npm/html/npm-16.png b/deps/npm/html/npm-16.png
new file mode 100644
index 0000000000..c3c9d05253
--- /dev/null
+++ b/deps/npm/html/npm-16.png
Binary files differ
diff --git a/deps/npm/html/npm-256-square.png b/deps/npm/html/npm-256-square.png
new file mode 100644
index 0000000000..f7f18b560e
--- /dev/null
+++ b/deps/npm/html/npm-256-square.png
Binary files differ
diff --git a/deps/npm/html/npm-256w.png b/deps/npm/html/npm-256w.png
new file mode 100644
index 0000000000..dac32c8a02
--- /dev/null
+++ b/deps/npm/html/npm-256w.png
Binary files differ
diff --git a/deps/npm/html/npm-64-square.png b/deps/npm/html/npm-64-square.png
new file mode 100644
index 0000000000..eef2629d27
--- /dev/null
+++ b/deps/npm/html/npm-64-square.png
Binary files differ
diff --git a/deps/npm/html/npm-fin.png b/deps/npm/html/npm-fin.png
new file mode 100644
index 0000000000..7efbef6274
--- /dev/null
+++ b/deps/npm/html/npm-fin.png
Binary files differ
diff --git a/deps/npm/html/npm-large.png b/deps/npm/html/npm-large.png
new file mode 100644
index 0000000000..27d83177e1
--- /dev/null
+++ b/deps/npm/html/npm-large.png
Binary files differ
diff --git a/deps/npm/html/npm.png b/deps/npm/html/npm.png
new file mode 100644
index 0000000000..d78ff53f2a
--- /dev/null
+++ b/deps/npm/html/npm.png
Binary files differ
diff --git a/deps/npm/lib/adduser.js b/deps/npm/lib/adduser.js
new file mode 100644
index 0000000000..b6d08ed760
--- /dev/null
+++ b/deps/npm/lib/adduser.js
@@ -0,0 +1,50 @@
+
+module.exports = adduser
+
+var registry = require("./utils/npm-registry-client/index.js")
+ , ini = require("./utils/ini.js")
+ , log = require("./utils/log.js")
+ , npm = require("./npm.js")
+ , prompt = require("./utils/prompt.js")
+ , promiseChain = require("./utils/promise-chain.js")
+ , crypto
+
+try {
+ crypto = process.binding("crypto") && require("crypto")
+} catch (ex) {}
+
+adduser.usage = "npm adduser\nThen enter stuff at the prompts"
+
+function adduser (args, cb) {
+ if (!crypto) return cb(new Error(
+ "You must compile node with ssl support to use the adduser feature"))
+
+ var u = { u : npm.config.get("username")
+ , p : npm.config.get("_password")
+ , e : npm.config.get("email")
+ }
+ , changed = false
+
+ promiseChain(cb)
+ (prompt, ["Username: ", u.u], function (un) {
+ changed = u.u !== un
+ u.u = un
+ })
+ (function (cb) {
+ if (u.p && !changed) return cb(null, u.p)
+ prompt("Password: ", u.p, true, cb)
+ }, [], function (pw) { u.p = pw })
+ (prompt, ["Email: ", u.e], function (em) { u.e = em })
+ (function (cb) {
+ if (changed) npm.config.del("_auth")
+ registry.adduser(u.u, u.p, u.e, function (er) {
+ if (er) return cb(er)
+ ini.set("username", u.u, "user")
+ ini.set("_password", u.p, "user")
+ ini.set("email", u.e, "user")
+ log("Authorized user " + u.u, "adduser")
+ ini.save("user", cb)
+ })
+ })
+ ()
+}
diff --git a/deps/npm/lib/bin.js b/deps/npm/lib/bin.js
new file mode 100644
index 0000000000..bd82bc9e31
--- /dev/null
+++ b/deps/npm/lib/bin.js
@@ -0,0 +1,19 @@
+module.exports = bin
+
+var npm = require("./npm.js")
+ , output = require("./utils/output.js")
+
+bin.usage = "npm bin\nnpm bin -g\n(just prints the bin folder)"
+
+function bin (args, cb) {
+ var path = require("path")
+ , b = npm.bin
+ , PATH = (process.env.PATH || "").split(":")
+
+ output.write(b, function (er) { cb(er, b) })
+
+ if (npm.config.get("global") && PATH.indexOf(b) === -1) {
+ output.write("(not in PATH env variable)"
+ ,npm.config.get("logfd"))
+ }
+}
diff --git a/deps/npm/lib/bugs.js b/deps/npm/lib/bugs.js
new file mode 100644
index 0000000000..1c52ffda0e
--- /dev/null
+++ b/deps/npm/lib/bugs.js
@@ -0,0 +1,50 @@
+
+module.exports = bugs
+
+bugs.usage = "npm bugs <pkgname>"
+
+bugs.completion = function (opts, cb) {
+ if (opts.conf.argv.remain.length > 2) return cb()
+ registry.get("/-/short", null, 60000, function (er, list) {
+ return cb(null, list || [])
+ })
+}
+
+var exec = require("./utils/exec.js")
+ , registry = require("./utils/npm-registry-client/index.js")
+ , npm = require("./npm.js")
+ , log = require("./utils/log.js")
+
+function bugs (args, cb) {
+ if (!args.length) return cb(bugs.usage)
+ var n = args[0].split("@").shift()
+ registry.get(n, "latest", 3600, function (er, d) {
+ if (er) return cb(er)
+ var bugs = d.bugs
+ , repo = d.repository || d.repositories
+ if (bugs) {
+ if (typeof bugs === "string") return open(bugs, cb)
+ if (bugs.url) return open(bugs.url, cb)
+ }
+ if (repo) {
+ if (Array.isArray(repo)) repo = repo.shift()
+ if (repo.url) repo = repo.url
+ log.verbose(repo, "repository")
+ if (repo && repo.match(/^(https?:\/\/|git(:\/\/|@))github.com/)) {
+ return open(repo.replace(/^git(@|:\/\/)/, "http://")
+ .replace(/^https?:\/\/github.com:/, "github.com/")
+ .replace(/\.git$/, '')+"/issues", cb)
+ }
+ }
+ return open("http://search.npmjs.org/#/" + d.name, cb)
+ })
+}
+
+function open (url, cb) {
+ exec(npm.config.get("browser"), [url], log.er(cb,
+ "Failed to open "+url+" in a browser. It could be that the\n"+
+ "'browser' config is not set. Try doing this:\n"+
+ " npm config set browser google-chrome\n"+
+ "or:\n"+
+ " npm config set browser lynx\n"))
+}
diff --git a/deps/npm/lib/build.js b/deps/npm/lib/build.js
new file mode 100644
index 0000000000..864eb27cf6
--- /dev/null
+++ b/deps/npm/lib/build.js
@@ -0,0 +1,186 @@
+
+// npm build command
+
+// everything about the installation after the creation of
+// the .npm/{name}/{version}/package folder.
+// linking the modules into the npm.root,
+// resolving dependencies, etc.
+
+// This runs AFTER install or link are completed.
+
+var npm = require("./npm.js")
+ , log = require("./utils/log.js")
+ , chain = require("slide").chain
+ , fs = require("graceful-fs")
+ , path = require("path")
+ , lifecycle = require("./utils/lifecycle.js")
+ , readJson = require("./utils/read-json.js")
+ , link = require("./utils/link.js")
+ , linkIfExists = link.ifExists
+ , cmdShim = require("./utils/cmd-shim.js")
+ , cmdShimIfExists = cmdShim.ifExists
+ , asyncMap = require("slide").asyncMap
+ , output = require("./utils/output.js")
+
+module.exports = build
+build.usage = "npm build <folder>\n(this is plumbing)"
+
+build._didBuild = {}
+build._noLC = {}
+function build (args, global, didPre, didRB, cb) {
+ if (typeof cb !== "function") cb = didRB, didRB = false
+ if (typeof cb !== "function") cb = didPre, didPre = false
+ if (typeof cb !== "function") {
+ cb = global, global = npm.config.get("global")
+ }
+ // it'd be nice to asyncMap these, but actually, doing them
+ // in parallel generally munges up the output from node-waf
+ var builder = build_(global, didPre, didRB)
+ chain(args.map(function (arg) { return function (cb) {
+ builder(arg, cb)
+ }}), cb)
+}
+
+function build_ (global, didPre, didRB) { return function (folder, cb) {
+ folder = path.resolve(folder)
+ build._didBuild[folder] = true
+ log.info(folder, "build")
+ readJson(path.resolve(folder, "package.json"), function (er, pkg) {
+ if (er) return cb(er)
+ chain
+ ( [ !didPre && [lifecycle, pkg, "preinstall", folder]
+ , [linkStuff, pkg, folder, global, didRB]
+ , pkg.name === "npm" && [writeBuiltinConf, folder]
+ , didPre !== build._noLC && [lifecycle, pkg, "install", folder]
+ , didPre !== build._noLC && [lifecycle, pkg, "postinstall", folder]
+ , didPre !== build._noLC
+ && npm.config.get("npat")
+ && [lifecycle, pkg, "test", folder] ]
+ , cb )
+ })
+}}
+
+function writeBuiltinConf (folder, cb) {
+ // the builtin config is "sticky". Any time npm installs itself,
+ // it puts its builtin config file there, as well.
+ var ini = require("./utils/ini.js")
+ ini.saveConfig("builtin", path.resolve(folder, "npmrc"), cb)
+}
+
+function linkStuff (pkg, folder, global, didRB, cb) {
+ // if it's global, and folder is in {prefix}/node_modules,
+ // then bins are in {prefix}/bin
+ // otherwise, then bins are in folder/../.bin
+ var parent = path.dirname(folder)
+ , gnm = global && npm.globalDir
+ , top = parent === npm.dir
+ , gtop = parent === gnm
+
+ log.verbose([global, gnm, gtop, parent], "linkStuff")
+ log(pkg._id, "linkStuff")
+
+ if (top && pkg.preferGlobal && !global) {
+ log.warn(pkg._id + " should be installed with -g", "prefer global")
+ }
+
+ asyncMap( [linkBins, linkMans, !didRB && rebuildBundles]
+ , function (fn, cb) {
+ if (!fn) return cb()
+ log.verbose(pkg._id, fn.name)
+ fn(pkg, folder, parent, gtop, cb)
+ }, cb)
+}
+
+function rebuildBundles (pkg, folder, parent, gtop, cb) {
+ if (!npm.config.get("rebuild-bundle")) return cb()
+
+ var deps = Object.keys(pkg.dependencies || {})
+ .concat(Object.keys(pkg.devDependencies || {}))
+ , bundles = pkg.bundleDependencies || pkg.bundledDependencies || []
+
+ fs.readdir(path.resolve(folder, "node_modules"), function (er, files) {
+ // error means no bundles
+ if (er) return cb()
+
+ log.verbose(files, "rebuildBundles")
+ // don't asyncMap these, because otherwise build script output
+ // gets interleaved and is impossible to read
+ chain(files.filter(function (file) {
+ // rebuild if:
+ // not a .folder, like .bin or .hooks
+ return file.charAt(0) !== "."
+ // not some old 0.x style bundle
+ && file.indexOf("@") === -1
+ // either not a dep, or explicitly bundled
+ && (deps.indexOf(file) === -1 || bundles.indexOf(file) !== -1)
+ }).map(function (file) {
+ file = path.resolve(folder, "node_modules", file)
+ return function (cb) {
+ if (build._didBuild[file]) return cb()
+ log.verbose(file, "rebuild bundle")
+ // if file is not a package dir, then don't do it.
+ fs.lstat(path.resolve(file, "package.json"), function (er, st) {
+ if (er) return cb()
+ build_(false)(file, cb)
+ })
+ }}), cb)
+ })
+}
+
+function linkBins (pkg, folder, parent, gtop, cb) {
+ if (!pkg.bin || !gtop && path.basename(parent) !== "node_modules") {
+ return cb()
+ }
+ var binRoot = gtop ? npm.globalBin
+ : path.resolve(parent, ".bin")
+ log.verbose([pkg.bin, binRoot, gtop], "bins linking")
+
+ asyncMap(Object.keys(pkg.bin), function (b, cb) {
+ linkBin( path.resolve(folder, pkg.bin[b])
+ , path.resolve(binRoot, b)
+ , gtop && folder
+ , function (er) {
+ if (er) return cb(er)
+ // bins should always be executable.
+ // XXX skip chmod on windows?
+ fs.chmod(path.resolve(folder, pkg.bin[b]), npm.modes.exec, function (er) {
+ if (er || !gtop) return cb(er)
+ var dest = path.resolve(binRoot, b)
+ , src = path.resolve(folder, pkg.bin[b])
+ , out = npm.config.get("parseable")
+ ? dest + "::" + src + ":BINFILE"
+ : dest + " -> " + src
+ output.write(out, cb)
+ })
+ })
+ }, cb)
+}
+
+function linkBin (from, to, gently, cb) {
+ if (process.platform !== "win32") {
+ return linkIfExists(from, to, gently, cb)
+ } else {
+ return cmdShimIfExists(from, to, cb)
+ }
+}
+
+function linkMans (pkg, folder, parent, gtop, cb) {
+ if (!pkg.man || !gtop || process.platform === "win32") return cb()
+ var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")
+ asyncMap(pkg.man, function (man, cb) {
+ if (typeof man !== "string") return cb()
+ var parseMan = man.match(/(.*)\.([0-9]+)(\.gz)?$/)
+ , stem = parseMan[1]
+ , sxn = parseMan[2]
+ , gz = parseMan[3] || ""
+ , bn = path.basename(stem)
+ , manSrc = path.join( folder, man )
+ , manDest = path.join( manRoot
+ , "man"+sxn
+ , (bn.indexOf(pkg.name) === 0 ? bn
+ : pkg.name + "-" + bn)
+ + "." + sxn + gz
+ )
+ linkIfExists(manSrc, manDest, gtop && folder, cb)
+ }, cb)
+}
diff --git a/deps/npm/lib/cache.js b/deps/npm/lib/cache.js
new file mode 100644
index 0000000000..bde72c4f95
--- /dev/null
+++ b/deps/npm/lib/cache.js
@@ -0,0 +1,735 @@
+// XXX lib/utils/tar.js and this file need to be rewritten.
+
+/*
+adding a folder:
+1. tar into tmp/random/package.tgz
+2. untar into tmp/random/contents/{blah}
+3. rename {blah} to "package"
+4. tar tmp/random/contents/package to cache/n/v/package.tgz
+5. untar cache/n/v/package.tgz into cache/n/v/package
+6. rm tmp/random
+
+Adding a url:
+1. fetch to tmp/random/package.tgz
+2. goto folder(2)
+
+adding a name@version:
+1. registry.get(name, version)
+2. if response isn't 304, add url(dist.tarball)
+
+adding a name@range:
+1. registry.get(name)
+2. Find a version that satisfies
+3. add name@version
+
+adding a local tarball:
+1. untar to tmp/random/{blah}
+2. goto folder(2)
+*/
+
+exports = module.exports = cache
+exports.read = read
+exports.clean = clean
+exports.unpack = unpack
+
+var mkdir = require("./utils/mkdir-p.js")
+ , exec = require("./utils/exec.js")
+ , fetch = require("./utils/fetch.js")
+ , npm = require("./npm.js")
+ , fs = require("graceful-fs")
+ , rm = require("rimraf")
+ , readJson = require("./utils/read-json.js")
+ , registry = require("./utils/npm-registry-client/index.js")
+ , log = require("./utils/log.js")
+ , path = require("path")
+ , output
+ , sha = require("./utils/sha.js")
+ , find = require("./utils/find.js")
+ , asyncMap = require("slide").asyncMap
+ , semver = require("semver")
+ , tar = require("./utils/tar.js")
+ , fileCompletion = require("./utils/completion/file-completion.js")
+ , url = require("url")
+
+cache.usage = "npm cache add <tarball file>"
+ + "\nnpm cache add <folder>"
+ + "\nnpm cache add <tarball url>"
+ + "\nnpm cache add <git url>"
+ + "\nnpm cache add <name>@<version>"
+ + "\nnpm cache ls [<path>]"
+ + "\nnpm cache clean [<pkg>[@<version>]]"
+
+cache.completion = function (opts, cb) {
+
+ var argv = opts.conf.argv.remain
+ if (argv.length === 2) {
+ return cb(null, ["add", "ls", "clean"])
+ }
+
+ switch (argv[2]) {
+ case "clean":
+ case "ls":
+ // cache and ls are easy, because the completion is
+ // what ls_ returns anyway.
+ // just get the partial words, minus the last path part
+ var p = path.dirname(opts.partialWords.slice(3).join("/"))
+ if (p === ".") p = ""
+ return ls_(p, 2, cb)
+ case "add":
+ // Same semantics as install and publish.
+ return npm.commands.install.completion(opts, cb)
+ }
+}
+
+function cache (args, cb) {
+ var cmd = args.shift()
+ switch (cmd) {
+ case "rm": case "clear": case "clean": return clean(args, cb)
+ case "list": case "sl": case "ls": return ls(args, cb)
+ case "add": return add(args, cb)
+ default: return cb(new Error(
+ "Invalid cache action: "+cmd))
+ }
+}
+
+// if the pkg and ver are in the cache, then
+// just do a readJson and return.
+// if they're not, then fetch them from the registry.
+var cacheSeen = {}
+function read (name, ver, forceBypass, cb) {
+ if (typeof cb !== "function") cb = forceBypass, forceBypass = true
+ var jsonFile = path.join(npm.cache, name, ver, "package", "package.json")
+ function c (er, data) {
+ if (!er) cacheSeen[data._id] = data
+ if (data) deprCheck(data)
+ return cb(er, data)
+ }
+
+ if (forceBypass
+ && (npm.config.get("force")
+ || process.platform === "cygwin")) {
+ log.verbose(true, "force found, skipping cache")
+ return addNamed(name, ver, c)
+ }
+
+ if (name+"@"+ver in cacheSeen) {
+ return cb(null, cacheSeen[name+"@"+ver])
+ }
+
+ readJson(jsonFile, function (er, data) {
+ if (er) return addNamed(name, ver, c)
+ deprCheck(data)
+ c(er, data)
+ })
+}
+
+// npm cache ls [<path>]
+function ls (args, cb) {
+ output = output || require("./utils/output.js")
+ args = args.join("/").split("@").join("/")
+ if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
+ ls_(args, npm.config.get("depth"), function(er, files) {
+ output.write(files.map(function (f) {
+ return path.join("~/.npm", f)
+ }).join("\n").trim(), function (er) {
+ return cb(er, files)
+ })
+ })
+}
+
+// Calls cb with list of cached pkgs matching show.
+function ls_ (req, depth, cb) {
+ return fileCompletion(npm.cache, req, depth, cb)
+}
+
+// npm cache clean [<path>]
+function clean (args, cb) {
+ if (!cb) cb = args, args = []
+ if (!args) args = []
+ args = args.join("/").split("@").join("/")
+ if (args.substr(-1) === "/") args = args.substr(0, args.length - 1)
+ var f = path.join(npm.cache, path.normalize(args))
+ if (f === npm.cache) {
+ fs.readdir(npm.cache, function (er, files) {
+ if (er) return cb()
+ asyncMap( files.filter(function (f) {
+ return npm.config.get("force") || f !== "-"
+ }).map(function (f) {
+ return path.join(npm.cache, f)
+ })
+ , rm, cb )
+ })
+ } else rm(path.join(npm.cache, path.normalize(args)), cb)
+}
+
+// npm cache add <tarball-url>
+// npm cache add <pkg> <ver>
+// npm cache add <tarball>
+// npm cache add <folder>
+exports.add = function (pkg, ver, scrub, cb) {
+ if (typeof cb !== "function") cb = scrub, scrub = false
+ if (typeof cb !== "function") cb = ver, ver = null
+ if (scrub) {
+ return clean([], function (er) {
+ if (er) return cb(er)
+ add([pkg, ver], cb)
+ })
+ }
+ log.verbose([pkg, ver], "cache add")
+ return add([pkg, ver], cb)
+}
+
+function add (args, cb) {
+ // this is hot code. almost everything passes through here.
+ // the args can be any of:
+ // ["url"]
+ // ["pkg", "version"]
+ // ["pkg@version"]
+ // ["pkg", "url"]
+ // This is tricky, because urls can contain @
+ // Also, in some cases we get [name, null] rather
+ // that just a single argument.
+
+ var usage = "Usage:\n"
+ + " npm cache add <tarball-url>\n"
+ + " npm cache add <pkg>@<ver>\n"
+ + " npm cache add <tarball>\n"
+ + " npm cache add <folder>\n"
+ , name
+ , spec
+
+ if (args[1] === undefined) args[1] = null
+
+ // at this point the args length must ==2
+ if (args[1] !== null) {
+ name = args[0]
+ spec = args[1]
+ } else if (args.length === 2) {
+ spec = args[0]
+ }
+
+ log.silly([name, spec, args], "cache add: name, spec, args")
+
+ if (!name && !spec) return cb(usage)
+
+ // see if the spec is a url
+ // otherwise, treat as name@version
+ var p = url.parse(spec.replace(/^git\+/, "git")) || {}
+ log.verbose(p, "parsed url")
+
+ // it could be that we got name@http://blah
+ // in that case, we will not have a protocol now, but if we
+ // split and check, we will.
+ if (!name && !p.protocol && spec.indexOf("@") !== -1) {
+ spec = spec.split("@")
+ name = spec.shift()
+ spec = spec.join("@")
+ return add([name, spec], cb)
+ }
+
+ switch (p.protocol) {
+ case "http:":
+ case "https:":
+ return addRemoteTarball(spec, null, name, cb)
+ case "git:":
+ case "githttp:":
+ case "githttps:":
+ case "gitrsync:":
+ case "gitftp:":
+ case "gitssh:":
+ //p.protocol = p.protocol.replace(/^git([^:])/, "$1")
+ return addRemoteGit(spec, p, name, cb)
+ default:
+ // if we have a name and a spec, then try name@spec
+ // if not, then try just spec (which may try name@"" if not found)
+ return name ? addNamed(name, spec, cb) : addLocal(spec, cb)
+ }
+}
+
+// Only have a single download action at once for a given url
+// additional calls stack the callbacks.
+var inFlightURLs = {}
+function addRemoteTarball (u, shasum, name, cb_) {
+ if (typeof cb_ !== "function") cb_ = name, name = ""
+ if (typeof cb_ !== "function") cb_ = shasum, shasum = null
+
+ if (!inFlightURLs[u]) inFlightURLs[u] = []
+ var iF = inFlightURLs[u]
+ iF.push(cb_)
+ if (iF.length > 1) return
+
+ function cb (er, data) {
+ var c
+ while (c = iF.shift()) c(er, data)
+ delete inFlightURLs[u]
+ }
+
+ log.verbose([u, shasum], "addRemoteTarball")
+ var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random(), "tmp.tgz")
+ mkdir(path.dirname(tmp), function (er) {
+ if (er) return cb(er)
+ fetch(u, tmp, function (er) {
+ if (er) return log.er(cb, "failed to fetch "+u)(er)
+ if (!shasum) return done()
+ // validate that the url we just downloaded matches the expected shasum.
+ sha.check(tmp, shasum, done)
+ })
+ })
+ function done (er) {
+ if (er) return cb(er)
+ addLocalTarball(tmp, name, cb)
+ }
+}
+
+// For now, this is kind of dumb. Just basically treat git as
+// yet another "fetch and scrub" kind of thing.
+// Clone to temp folder, then proceed with the addLocal stuff.
+function addRemoteGit (u, parsed, name, cb_) {
+ if (typeof cb_ !== "function") cb_ = name, name = null
+
+ if (!inFlightURLs[u]) inFlightURLs[u] = []
+ var iF = inFlightURLs[u]
+ iF.push(cb_)
+ if (iF.length > 1) return
+
+ function cb (er, data) {
+ var c
+ while (c = iF.shift()) c(er, data)
+ delete inFlightURLs[u]
+ }
+
+ // figure out what we should check out.
+ var co = parsed.hash && parsed.hash.substr(1) || "master"
+ u = u.replace(/^git\+/, "")
+ .replace(/#.*$/, "")
+ .replace(/^ssh:\/\//, "") // ssh is the default anyway
+ log.verbose([u, co], "addRemoteGit")
+
+ var tmp = path.join(npm.tmp, Date.now()+"-"+Math.random())
+ mkdir(path.dirname(tmp), function (er) {
+ if (er) return cb(er)
+ exec( "git", ["clone", u, tmp], null, false
+ , function (er, code, stdout, stderr) {
+ stdout = (stdout + "\n" + stderr).trim()
+ if (er) {
+ log.error(stdout, "git clone "+u)
+ return cb(er)
+ }
+ log.verbose(stdout, "git clone "+u)
+ exec( "git", ["checkout", co], null, false, tmp
+ , function (er, code, stdout, stderr) {
+ stdout = (stdout + "\n" + stderr).trim()
+ if (er) {
+ log.error(stdout, "git checkout "+co)
+ return cb(er)
+ }
+ log.verbose(stdout, "git checkout "+co)
+ addLocalDirectory(tmp, cb)
+ })
+ })
+ })
+}
+
+
+// only have one request in flight for a given
+// name@blah thing.
+var inFlightNames = {}
+function addNamed (name, x, cb_) {
+ log.info([name, x], "addNamed")
+ var k = name + "@" + x
+ if (!inFlightNames[k]) inFlightNames[k] = []
+ var iF = inFlightNames[k]
+ iF.push(cb_)
+ if (iF.length > 1) return
+
+ function cb (er, data) {
+ var c
+ while (c = iF.shift()) c(er, data)
+ delete inFlightNames[k]
+ }
+
+ log.verbose([semver.valid(x), semver.validRange(x)], "addNamed")
+ return ( null !== semver.valid(x) ? addNameVersion
+ : null !== semver.validRange(x) ? addNameRange
+ : addNameTag
+ )(name, x, cb)
+}
+
+function addNameTag (name, tag, cb) {
+ log([name, tag], "addNameTag")
+ var explicit = true
+ if (!tag) {
+ explicit = false
+ tag = npm.config.get("tag")
+ }
+
+ registry.get(name, function (er, data, json, response) {
+ if (er) return cb(er)
+ engineFilter(data)
+ if (data["dist-tags"] && data["dist-tags"][tag]
+ && data.versions[data["dist-tags"][tag]]) {
+ return addNameVersion(name, data["dist-tags"][tag], cb)
+ }
+ if (!explicit && Object.keys(data.versions).length) {
+ return addNameRange(name, "*", cb)
+ }
+ return cb(installTargetsError(tag, data))
+ })
+}
+
// Remove from data.versions any version whose "engines" field rules out
// the running node or npm.  Mutates `data` in place and returns it.
// No-op when the node version is unknown or --force is set.
// (Fix: the main path previously returned undefined while the early
// path returned data; now both return data consistently.)
function engineFilter (data) {
  var npmv = npm.version
    , nodev = npm.config.get("node-version")

  if (!nodev || npm.config.get("force")) return data

  Object.keys(data.versions || {}).forEach(function (v) {
    var eng = data.versions[v].engines
    if (!eng) return
    if (eng.node && !semver.satisfies(nodev, eng.node)
        || eng.npm && !semver.satisfies(npmv, eng.npm)) {
      delete data.versions[v]
    }
  })
  return data
}
+
// Add name@range to the cache: fetch the registry doc, drop versions the
// current node/npm can't use, then install the best satisfying version
// (preferring the default-tagged version if it satisfies).
function addNameRange (name, range, cb) {
  // Validate before normalizing, so the error reports what the user
  // actually typed instead of "null".
  var vr = semver.validRange(range)
  if (vr === null) return cb(new Error(
    "Invalid version range: "+range))
  range = vr
  registry.get(name, function (er, data, json, response) {
    if (er) return cb(er)
    engineFilter(data)
    // if the tagged version satisfies, then use that.
    var tagged = data["dist-tags"][npm.config.get("tag")]
    if (tagged && data.versions[tagged] && semver.satisfies(tagged, range)) {
      return addNameVersion(name, tagged, cb)
    }
    // find the max satisfying version.
    var ms = semver.maxSatisfying(Object.keys(data.versions || {}), range)
    if (!ms) {
      return cb(installTargetsError(range, data))
    }
    addNameVersion(name, ms, cb)
  })
}
+
// Build the Error reported when no version of a package satisfied the
// requested tag/range.  Lists whatever install targets do exist:
// dist-tag keys that are also version keys, plus every version.
function installTargetsError (requested, data) {
  var versions = data.versions || {}
  var tagNames = Object.keys(data["dist-tags"]).filter(function (t) {
    return versions.hasOwnProperty(t)
  })
  var targets = tagNames.concat(Object.keys(versions))

  var what = data.name + (requested ? "@'" + requested + "'" : "")

  var hint
  if (targets.length) {
    hint = "Valid install targets:\n" + JSON.stringify(targets)
  } else {
    hint = "No valid targets found.\n"
         + "Perhaps not compatible with your version of node?"
  }

  return new Error( "No compatible version found: "
                  + what + "\n" + hint)
}
+
// Add name@<exact version> to the cache.  Fetches the version document,
// prefers a prebuilt binary dist when the "bindist" config matches, and
// reuses an already-cached tarball when the registry says 304.
function addNameVersion (name, ver, cb) {
  // Validate before normalizing, so the error reports what the user
  // actually typed instead of "null".
  var v = semver.valid(ver)
  if (v === null) return cb(new Error("Invalid version: "+ver))
  ver = v
  registry.get(name, ver, function (er, data, json, response) {
    if (er) return cb(er)
    deprCheck(data)
    var dist = data.dist

    if (!dist) return cb(new Error("No dist in "+data._id+" package"))

    // Prefer a prebuilt binary distribution when configured and available.
    var bd = npm.config.get("bindist")
      , b = dist.bin && bd && dist.bin[bd]
    log.verbose([bd, dist], "bin dist")
    if (b && b.tarball && b.shasum) {
      log.info(data._id, "prebuilt")
      log.verbose(b, "prebuilt "+data._id)
      dist = b
    }

    if (!dist.tarball) return cb(new Error(
      "No dist.tarball in " + data._id + " package"))

    // Anything but a 304 (or --force, or cygwin where the cache is
    // unreliable) means we must fetch the tarball.
    if (response.statusCode !== 304 || npm.config.get("force")
        || process.platform === "cygwin") {
      return fetchit()
    }

    // we got cached data, so let's see if we have a tarball.
    fs.stat(path.join(npm.cache, name, ver, "package.tgz"), function (er, s) {
      if (!er) readJson( path.join( npm.cache, name, ver
                                  , "package", "package.json" )
                       , function (er, data) {
          if (er) return fetchit()
          return cb(null, data)
        })
      else return fetchit()
    })

    function fetchit () {
      // use the same protocol as the registry.
      // https registry --> https tarballs.
      var tb = url.parse(dist.tarball)
      tb.protocol = url.parse(npm.config.get("registry")).protocol
      delete tb.href
      tb = url.format(tb)
      return addRemoteTarball( tb
                             , dist.shasum
                             , name+"-"+ver
                             , cb )
    }
  })
}
+
// Add a package from the local filesystem, dispatching on whether `p`
// is a folder or a tarball.  If the local add fails and `p` doesn't look
// like a path, fall back to treating it as a registry package name.
function addLocal (p, name, cb_) {
  if (typeof cb_ !== "function") cb_ = name, name = ""

  function cb (er, data) {
    if (er) {
      // if it doesn't have a / in it, it might be a
      // remote thing.
      if (p.indexOf("/") === -1 && p.charAt(0) !== ".") {
        return addNamed(p, "", cb_)
      }
      return log.er(cb_, "Could not install: "+p)(er)
    }
    return cb_(er, data)
  }

  // figure out if this is a folder or file.
  fs.stat(p, function (er, s) {
    if (er) return cb(er)
    if (s.isDirectory()) addLocalDirectory(p, name, cb)
    else addLocalTarball(p, name, cb)
  })
}
+
// Add a tarball from the local filesystem.  Tarballs already under
// npm.tmp or npm.cache are handled in place; anything else is copied
// into a fresh tmp dir first so the original file is never touched.
function addLocalTarball (p, name, cb) {
  if (typeof cb !== "function") cb = name, name = ""
  // if it's a tar, and not in place,
  // then unzip to .tmp, add the tmp folder, and clean up tmp
  if (p.indexOf(npm.tmp) === 0) return addTmpTarball(p, name, cb)

  if (p.indexOf(npm.cache) === 0) {
    if (path.basename(p) !== "package.tgz") return cb(new Error(
      "Not a valid cache tarball name: "+p))
    return addPlacedTarball(p, name, cb)
  }

  // just copy it over and then add the temp tarball file.
  var tmp = path.join(npm.tmp, name + Date.now()
                      + "-" + Math.random(), "tmp.tgz")
  mkdir(path.dirname(tmp), function (er) {
    if (er) return cb(er)
    var from = fs.createReadStream(p)
      , to = fs.createWriteStream(tmp)
      , errState = null
    // Report only the first stream error; later ones are ignored.
    function errHandler (er) {
      if (errState) return
      return cb(errState = er)
    }
    from.on("error", errHandler)
    to.on("error", errHandler)
    to.on("close", function () {
      if (errState) return
      // Normalize permissions on the copy before handing it off.
      log.verbose(npm.modes.file.toString(8), "chmod "+tmp)
      fs.chmod(tmp, npm.modes.file, function (er) {
        if (er) return cb(er)
        addTmpTarball(tmp, name, cb)
      })
    })
    from.pipe(to)
  })
}
+
// to maintain the cache dir's permissions consistently.
// Memoized stat of the cache directory, used as the uid/gid source for
// everything written into the cache.  Creates the dir if missing.
var cacheStat = null
function getCacheStat (cb) {
  if (cacheStat) return cb(null, cacheStat)
  fs.stat(npm.cache, function (er, st) {
    // No cache dir yet: create one with appropriate ownership.
    if (er) return makeCacheDir(cb)
    if (!st.isDirectory()) {
      return log.er(cb, "invalid cache directory: "+npm.cache)(er)
    }
    return cb(null, cacheStat = st)
  })
}
+
// Create the cache directory, choosing an owner carefully: under sudo,
// use the invoking user's SUDO_UID/SUDO_GID; as plain root with a HOME,
// match the ownership of $HOME so the cache stays usable without sudo.
function makeCacheDir (cb) {
  // Platforms without getuid (e.g. Windows) get no ownership handling.
  if (!process.getuid) return mkdir(npm.cache, npm.modes.exec, cb)

  var uid = +process.getuid()
    , gid = +process.getgid()

  if (uid === 0) {
    if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
    if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
  }
  if (uid !== 0 || !process.env.HOME) {
    cacheStat = {uid: uid, gid: gid}
    return mkdir(npm.cache, npm.modes.exec, uid, gid, function (er) {
      return cb(er, cacheStat)
    })
  }
  // Still root and HOME is set: inherit ownership from the home dir.
  fs.stat(process.env.HOME, function (er, st) {
    if (er) return log.er(cb, "homeless?")(er)
    cacheStat = st
    log.silly([st.uid, st.gid], "uid, gid for cache dir")
    return mkdir(npm.cache, npm.modes.exec, st.uid, st.gid, function (er) {
      return cb(er, cacheStat)
    })
  })
}
+
+
+
+
// Add a tarball that is already in its final cache location.  Looks up
// the cache dir's uid/gid first so the unpacked files get consistent
// ownership.
function addPlacedTarball (p, name, cb) {
  if (!cb) cb = name, name = ""
  getCacheStat(function (er, cs) {
    return er ? cb(er)
              : addPlacedTarball_(p, name, cs.uid, cs.gid, cb)
  })
}
+
// Worker for addPlacedTarball, with the uid/gid already resolved.
function addPlacedTarball_ (p, name, uid, gid, cb) {
  // now we know it's in place already as .cache/name/ver/package.tgz
  // unpack to .cache/name/ver/package/, read the package.json,
  // and fire cb with the json data.
  var target = path.dirname(p)
    , folder = path.join(target, "package")

  // Remove any stale unpacked copy before unpacking fresh.
  rm(folder, function (er) {
    if (er) return log.er(cb, "Could not remove "+folder)(er)
    tar.unpack(p, folder, null, null, uid, gid, function (er) {
      if (er) return log.er(cb, "Could not unpack "+p+" to "+target)(er)
      // calculate the sha of the file that we just unpacked.
      // this is so that the data is available when publishing.
      sha.get(p, function (er, shasum) {
        if (er) return log.er(cb, "couldn't validate shasum of "+p)(er)
        readJson(path.join(folder, "package.json"), function (er, data) {
          if (er) return log.er(cb, "couldn't read json in "+folder)(er)
          data.dist = data.dist || {}
          if (shasum) data.dist.shasum = shasum
          deprCheck(data)
          // Fix up mode (and, where sensible, ownership) of the tarball.
          asyncMap([p], function (f, cb) {
            log.verbose(npm.modes.file.toString(8), "chmod "+f)
            fs.chmod(f, npm.modes.file, cb)
          }, function (f, cb) {
            if (process.platform === "win32") {
              log.silly(f, "skipping chown for windows")
              cb()
            } else if (typeof uid === "number"
                && typeof gid === "number"
                && parseInt(uid, 10) === uid
                && parseInt(gid, 10) === gid) {
              log.verbose([f, uid, gid], "chown")
              fs.chown(f, uid, gid, cb)
            } else {
              log.verbose([f, uid, gid], "not chowning, invalid uid/gid")
              cb()
            }
          }, function (er) {
            cb(er, data)
          })
        })
      })
    })
  })
}
+
// Add a package folder: read its package.json, pack it into a tarball
// (directly into the cache when the folder is already named "package",
// otherwise via a tmp tarball), and add that tarball.
function addLocalDirectory (p, name, cb) {
  if (typeof cb !== "function") cb = name, name = ""
  // if it's a folder, then read the package.json,
  // tar it to the proper place, and add the cache tar
  if (p.indexOf(npm.cache) === 0) return cb(new Error(
    "Adding a cache directory to the cache will make the world implode."))
  readJson(path.join(p, "package.json"), function (er, data) {
    if (er) return cb(er)
    deprCheck(data)
    var random = Date.now() + "-" + Math.random()
      , tmp = path.join(npm.tmp, random)
      , tmptgz = path.resolve(tmp, "tmp.tgz")
      , placed = path.resolve( npm.cache, data.name
                             , data.version, "package.tgz" )
      , placeDirect = path.basename(p) === "package"
      , tgz = placeDirect ? placed : tmptgz
      // Only run the extra packing niceties for user folders, not for
      // things already under npm's tmp or cache dirs.
      , doFancyCrap = p.indexOf(npm.tmp) !== 0
                   && p.indexOf(npm.cache) !== 0
    tar.pack(tgz, p, data, doFancyCrap, function (er) {
      if (er) return log.er(cb,"couldn't pack "+p+ " to "+tgz)(er)
      addLocalTarball(tgz, name, cb)
    })
  })
}
+
// Add a tarball that lives under npm.tmp.  Resolves the cache dir's
// uid/gid first so unpacked files are owned consistently.
function addTmpTarball (tgz, name, cb) {
  if (!cb) cb = name, name = ""
  getCacheStat(function (er, cs) {
    return er ? cb(er)
              : addTmpTarball_(tgz, name, cs.uid, cs.gid, cb)
  })
}
+
// Worker for addTmpTarball: unpack next to the tarball, find the single
// package folder inside (ignoring dotfiles and the tmp.tgz itself),
// normalize its name to "package", then add it as a local directory.
function addTmpTarball_ (tgz, name, uid, gid, cb) {
  var contents = path.resolve(path.dirname(tgz)) // , "contents")
  tar.unpack( tgz, path.resolve(contents, "package")
            , null, null
            , uid, gid
            , function (er) {
    if (er) return log.er(cb, "couldn't unpack "+tgz+" to "+contents)(er)
    fs.readdir(contents, function (er, folder) {
      if (er) return log.er(cb, "couldn't readdir "+contents)(er)
      log.verbose(folder, "tarball contents")
      // More than one entry: drop dotfiles and the tarball itself.
      if (folder.length > 1) {
        folder = folder.filter(function (f) {
          return !f.match(/^\.|^tmp\.tgz$/)
        })
      }
      // Still more than one: warn and use only the first entry.
      if (folder.length > 1) {
        log.warn(folder.slice(1).join("\n")
                ,"extra junk in folder, ignoring")
      }
      if (!folder.length) return cb(new Error("Empty package tarball"))
      folder = path.join(contents, folder[0])
      var newName = path.join(contents, "package")
      fs.rename(folder, newName, function (er) {
        if (er) return log.er(cb, "couldn't rename "+folder+" to package")(er)
        addLocalDirectory(newName, name, cb)
      })
    })
  })
}
+
// Unpack a cached pkg@ver tarball to unpackTarget.  All of dMode, fMode,
// uid, gid are optional; trailing-argument shifting lets callers pass cb
// in any of those positions.
function unpack (pkg, ver, unpackTarget, dMode, fMode, uid, gid, cb) {
  if (typeof cb !== "function") cb = gid, gid = null
  if (typeof cb !== "function") cb = uid, uid = null
  if (typeof cb !== "function") cb = fMode, fMode = null
  if (typeof cb !== "function") cb = dMode, dMode = null

  // Ensure the package is actually in the cache before unpacking.
  read(pkg, ver, false, function (er, data) {
    if (er) {
      log.error("Could not read data for "+pkg+"@"+ver)
      return cb(er)
    }
    tar.unpack( path.join(npm.cache, pkg, ver, "package.tgz")
              , unpackTarget
              , dMode, fMode
              , uid, gid
              , cb )
  })
}
+
// Cache of known deprecation messages by package id, and which ids the
// user has already been warned about (warn only once per id).
var deprecated = {}
  , deprWarned = {}
// Propagate any known deprecation notice onto `data`, remember newly
// seen notices, and warn the user once per package id.
function deprCheck (data) {
  var id = data._id
  if (deprecated[id]) data.deprecated = deprecated[id]
  if (!data.deprecated) return
  deprecated[id] = data.deprecated
  if (deprWarned[id]) return
  deprWarned[id] = true
  log.warn(id+": "+data.deprecated, "deprecated")
}
diff --git a/deps/npm/lib/completion.js b/deps/npm/lib/completion.js
new file mode 100644
index 0000000000..bc97bf1af9
--- /dev/null
+++ b/deps/npm/lib/completion.js
@@ -0,0 +1,245 @@

// npm completion: emit a bash/zsh completion script, or compute
// completions when invoked by that script via the COMP_* environment.
module.exports = completion

completion.usage = "npm completion >> ~/.bashrc\n"
                 + "npm completion >> ~/.zshrc\n"
                 + "source <(npm completion)"

var output = require("./utils/output.js")
  , configDefs = require("./utils/config-defs.js")
  , configTypes = configDefs.types
  , shorthands = configDefs.shorthands
  , nopt = require("nopt")
  // Config names, minus internal ones (leading underscore).
  , configNames = Object.keys(configTypes).filter(function (e) {
      return e.charAt(0) !== "_"
    })
  , shorthandNames = Object.keys(shorthands)
  , allConfs = configNames.concat(shorthandNames)
  , npm = require("./npm.js")
+
// Completion for the completion command itself: suggest the rc files
// that exist in $HOME (as ">> ~/.bashrc"-style redirections when the
// user is on the redirection argument).
// (Fix: removed the unused `bashProfExists` local.)
completion.completion = function (opts, cb) {
  if (opts.w > 3) return cb()

  var fs = require("fs")
    , path = require("path")
    , bashExists = null
    , zshExists = null
  fs.stat(path.resolve(process.env.HOME, ".bashrc"), function (er, b) {
    bashExists = !er
    next()
  })
  fs.stat(path.resolve(process.env.HOME, ".zshrc"), function (er, b) {
    zshExists = !er
    next()
  })
  // Runs after each stat; only proceeds once both have answered.
  function next () {
    if (zshExists === null || bashExists === null) return
    var out = []
    if (zshExists) out.push("~/.zshrc")
    if (bashExists) out.push("~/.bashrc")
    if (opts.w === 2) out = out.map(function (m) {
      return [">>", m]
    })
    cb(null, out)
  }
}
+
// Handle `npm completion`.  With no COMP_* environment variables set,
// print the completion script for the user to source.  Otherwise compute
// completions for the current command line and write them to stdout.
function completion (args, cb) {
  if (process.platform === "win32") {
    var e = new Error("npm completion not supported on windows")
    e.code = "ENOTSUP"
    e.errno = require("constants").ENOTSUP
    return cb(e)
  }

  // if the COMP_* isn't in the env, then just dump the script.
  if (process.env.COMP_CWORD === undefined
    ||process.env.COMP_LINE === undefined
    ||process.env.COMP_POINT === undefined
    ) return dumpScript(cb)

  // NOTE(review): debug output — goes to stderr, so it doesn't corrupt
  // the completion words written to stdout.
  console.error(process.env.COMP_CWORD)
  console.error(process.env.COMP_LINE)
  console.error(process.env.COMP_POINT)

  //console.log("abracadabrasauce\nabracad cat monger")
  //if (Math.random() * 3 < 1) console.log("man\\ bear\\ pig")
  //else if (Math.random() * 3 < 1)
  //  console.log("porkchop\\ sandwiches\nporkman")
  //else console.log("encephylophagy")

  // get the partial line and partial word,
  // if the point isn't at the end.
  // ie, tabbing at: npm foo b|ar
  var w = +process.env.COMP_CWORD
    , words = args.map(unescape)
    , word = words[w]
    , line = process.env.COMP_LINE
    , point = +process.env.COMP_POINT
    , lineLength = line.length
    , partialLine = line.substr(0, point)
    , partialWords = words.slice(0, w)

  // figure out where in that last word the point is.
  var partialWord = args[w]
    , i = partialWord.length
  // Shrink from the right until the word prefix matches the line's tail.
  while (partialWord.substr(0, i) !== partialLine.substr(-1*i) && i > 0) {
    i --
  }
  partialWord = unescape(partialWord.substr(0, i))
  partialWords.push(partialWord)

  var opts = { words : words
             , w : w
             , word : word
             , line : line
             , lineLength : line.length
             , point : point
             , partialLine : partialLine
             , partialWords : partialWords
             , partialWord : partialWord
             , raw: args
             }

  cb = wrapCb(cb, opts)

  console.error(opts)

  // A "--" earlier on the line ends option parsing; otherwise a leading
  // dash means we're completing a config name, or the value for one.
  if (partialWords.slice(0, -1).indexOf("--") === -1) {
    if (word.charAt(0) === "-") return configCompl(opts, cb)
    if (words[w - 1]
        && words[w - 1].charAt(0) === "-"
        && !isFlag(words[w - 1])) {
      // awaiting a value for a non-bool config.
      // don't even try to do this for now
      console.error("configValueCompl")
      return configValueCompl(opts, cb)
    }
  }

  // try to find the npm command.
  // it's the first thing after all the configs.
  // take a little shortcut and use npm's arg parsing logic.
  // don't have to worry about the last arg being implicitly
  // boolean'ed, since the last block will catch that.
  var parsed = opts.conf =
    nopt(configTypes, shorthands, partialWords.slice(0, -1), 0)
  // check if there's a command already.
  console.error(parsed)
  var cmd = parsed.argv.remain[1]
  if (!cmd) return cmdCompl(opts, cb)

  // Apply parsed configs so the command's completion sees them.
  Object.keys(parsed).forEach(function (k) {
    npm.config.set(k, parsed[k])
  })

  // at this point, if words[1] is some kind of npm command,
  // then complete on it.
  // otherwise, do nothing
  cmd = npm.commands[cmd]
  if (cmd && cmd.completion) return cmd.completion(opts, cb)

  // nothing to do.
  cb()
}
+
// Print the shell completion script (utils/completion.sh, sans shebang)
// to stdout, tolerating the EPIPE that OS X's `source <(...)` causes.
function dumpScript (cb) {
  var fs = require("graceful-fs")
    , path = require("path")
    , p = path.resolve(__dirname, "utils/completion.sh")

  fs.readFile(p, "utf8", function (er, d) {
    if (er) return cb(er)
    d = d.replace(/^\#\!.*?\n/, "")

    // NOTE(review): both the write callback and the error handler call
    // cb — looks like cb could fire twice if the write completes and an
    // EPIPE arrives; confirm whether that matters to callers.
    process.stdout.write(d, function (n) { cb() })
    process.stdout.on("error", function (er) {
      // Darwin is a real dick sometimes.
      //
      // This is necessary because the "source" or "." program in
      // bash on OS X closes its file argument before reading
      // from it, meaning that you get exactly 1 write, which will
      // work most of the time, and will always raise an EPIPE.
      //
      // Really, one should not be tossing away EPIPE errors, or any
      // errors, so casually. But, without this, `. <(npm completion)`
      // can never ever work on OS X.
      if (er.errno === require("constants").EPIPE) er = null
      cb(er)
    })

  })
}
+
// Undo shell-style escaping on a completion word: a double-quoted word
// has its surrounding quotes stripped; otherwise backslash-escaped
// spaces become plain spaces.
function unescape (w) {
  var quoted = w.charAt(0) === "\""
  return quoted ? w.replace(/^"|"$/g, "")
                : w.replace(/\\ /g, " ")
}
+
// Quote a completion word for the shell: words containing whitespace
// get wrapped in double quotes; everything else passes through as-is.
function escape (w) {
  if (/\s+/.test(w)) return "\"" + w + "\""
  return w
}
+
+// The command should respond with an array. Loop over that,
+// wrapping quotes around any that have spaces, and writing
+// them to stdout. Use console.log, not the outfd config.
+// If any of the items are arrays, then join them with a space.
+// Ie, returning ["a", "b c", ["d", "e"]] would allow it to expand
+// to: "a", "b c", or "d" "e"
// The command should respond with an array. Loop over that,
// wrapping quotes around any that have spaces, and writing
// them to stdout. Use console.log, not the outfd config.
// If any of the items are arrays, then join them with a space.
// Ie, returning ["a", "b c", ["d", "e"]] would allow it to expand
// to: "a", "b c", or "d" "e"
function wrapCb (cb, opts) { return function (er, compls) {
  // Normalize to an array; a falsy result means "no completions".
  if (!Array.isArray(compls)) compls = compls ? [compls] : []
  compls = compls.map(function (c) {
    if (Array.isArray(c)) c = c.map(escape).join(" ")
    else c = escape(c)
    return c
  })
  // Keep only completions matching what the user has typed so far.
  if (opts.partialWord) compls = compls.filter(function (c) {
    return c.indexOf(opts.partialWord) === 0
  })
  console.error([er && er.stack, compls, opts.partialWord])
  if (er || compls.length === 0) return cb(er)
  output.write(compls.join("\n"), 1, false, cb)
}}
+
// the current word has a dash. Return the config names,
// with the same number of dashes as the current word has.
// Boolean flags are additionally offered in negated ("no-") form.
// (Fix: removed unused locals `conf`/`confs` and a leftover debug
// console.error that spammed stderr on every completion.)
function configCompl (opts, cb) {
  var word = opts.word
    , split = word.match(/^(-+)((?:no-)*)(.*)$/)
    , dashes = split[1]
    , no = split[2]
    , flags = configNames.filter(isFlag)

  return cb(null, allConfs.map(function (c) {
    return dashes + c
  }).concat(flags.map(function (f) {
    return dashes + (no || "no-") + f
  })))
}
+
+// expand with the valid values of various config values.
+// not yet implemented.
// expand with the valid values of various config values.
// not yet implemented — always completes to nothing.
function configValueCompl (opts, cb) {
  console.error('configValue', opts)
  var none = []
  return cb(null, none)
}
+
+// check if the thing is a flag or not.
+function isFlag (word) {
+ // shorthands never take args.
+ var split = word.match(/^(-*)((?:no-)+)?(.*)$/)
+ , dashes = split[1]
+ , no = split[2]
+ , conf = split[3]
+ return no || configTypes[conf] === Boolean || shorthands[conf]
+}
+
+// complete against the npm commands
+function cmdCompl (opts, cb) {
+ return cb(null, npm.fullList)
+}
diff --git a/deps/npm/lib/config.js b/deps/npm/lib/config.js
new file mode 100644
index 0000000000..6781679f1d
--- /dev/null
+++ b/deps/npm/lib/config.js
@@ -0,0 +1,286 @@

// npm config: get/set/delete/list/edit configuration values.
module.exports = config

config.usage = "npm config set <key> <value>"
             + "\nnpm config get <key>"
             + "\nnpm config delete <key>"
             + "\nnpm config list"
             + "\nnpm config edit"

var ini = require("./utils/ini.js")
  , log = require("./utils/log.js")
  , npm = require("./npm.js")
  , exec = require("./utils/exec.js")
  , fs = require("graceful-fs")
  // defaults table, loaded lazily by edit() to avoid work on startup
  , dc
  , output = require("./utils/output.js")
  , types = require("./utils/config-defs.js").types
+
// Tab-completion for `npm config`: first the subcommand, then (for
// get/set/delete/rm) the known config key names.
config.completion = function (opts, cb) {
  var argv = opts.conf.argv.remain
  if (argv[1] !== "config") argv.unshift("config")
  if (argv.length === 2) {
    var cmds = ["get", "set", "delete", "ls", "rm", "edit"]
    // "list" would shadow "ls" in the completion list unless the user
    // has typed past the "l".
    if (opts.partialWord !== "l") cmds.push("list")
    return cb(null, cmds)
  }

  var action = argv[2]
  switch (action) {
    case "set":
      // todo: complete with valid values, if possible.
      if (argv.length > 3) return cb(null, [])
      // fallthrough
    case "get":
    case "delete":
    case "rm":
      return cb(null, Object.keys(types))
    case "edit":
    case "list": case "ls":
      return cb(null, [])
    default: return cb(null, [])
  }
}
+
+// npm config set key value
+// npm config get key
+// npm config list
+function config (args, cb) {
+ var action = args.shift()
+ switch (action) {
+ case "set": return set(args[0], args[1], cb)
+ case "get": return get(args[0], cb)
+ case "delete": case "rm": case "del": return del(args[0], cb)
+ case "list": case "ls": return list(cb)
+ case "edit": return edit(cb)
+ default: return unknown(action, cb)
+ }
+}
+
// npm config edit: save the current config, rewrite the file with a
// commented header and all defaults appended as comments, open it in
// the configured editor, then re-resolve and re-save the result.
function edit (cb) {
  var e = ini.get("editor")
    , which = ini.get("global") ? "global" : "user"
    , f = ini.get(which + "config")
    , eol = process.platform === "win32" ? "\r\n" : "\n"
  if (!e) return cb(new Error("No EDITOR config or environ set."))
  ini.save(which, function (er) {
    if (er) return cb(er)
    fs.readFile(f, "utf8", function (er, data) {
      // A missing config file is fine: start from empty.
      if (er) data = ""
      dc = dc || require("./utils/config-defs.js").defaults
      data = [ ";;;;"
             , "; npm "+(ini.get("global") ?
               "globalconfig" : "userconfig")+" file"
             , "; this is a simple ini-formatted file"
             , "; lines that start with semi-colons are comments."
             , "; read `npm help config` for help on the various options"
             , ";;;;"
             , ""
             , data
             ].concat( [ ";;;;"
                       , "; all options with default values"
                       , ";;;;"
                       ]
                     )
              .concat(Object.keys(dc).map(function (k) {
                return "; " + k + " = " + ini.unParseField(dc[k],k)
              }))
              .concat([""])
              .join(eol)
      fs.writeFile
        ( f
        , data
        , "utf8"
        , function (er) {
            if (er) return cb(er)
            exec(e, [f], function (er) {
              if (er) return cb(er)
              // Re-read what the user wrote, then persist it.
              ini.resolveConfigs(function (er) {
                if (er) return cb(er)
                ini.save(which, cb)
              })
            })
          }
        )
    })
  })
}
+
// Delete a config key, then persist the global or user config file
// (whichever --global selects).
function del (key, cb) {
  if (!key) return cb(new Error("no key provided"))
  ini.del(key)
  ini.save(ini.get("global") ? "global" : "user", cb)
}
+
// Set a config key.  Accepts either ("key", "value") or a single
// "key=value" string; a missing value becomes the empty string.
// Saves to the global or user config file depending on --global.
function set (key, val, cb) {
  if (val === undefined) {
    if (key.indexOf("=") === -1) {
      val = ""
    } else {
      var parts = key.split("=")
      key = parts.shift()
      val = parts.join("=")
    }
  }
  key = key.trim()
  val = val.trim()
  log("set "+key+" "+val, "config")
  var where = ini.get("global") ? "global" : "user"
  ini.set(key, val, where)
  ini.save(where, cb)
}
+
// Print one config value; with no key, fall back to the full listing.
// Keys starting with "_" hold credentials and are never printed.
function get (key, cb) {
  if (!key) return list(cb)
  if (key.charAt(0) === "_") {
    return cb(new Error("---sekretz---"))
  }
  output.write(npm.config.get(key), cb)
}
+
// Ascending comparator for config keys.  Never reports equality:
// equal inputs compare as -1, matching the original behavior.
function sort (a, b) {
  if (a > b) return 1
  return -1
}
+
// Descending comparator for config keys.  Never reports equality:
// equal inputs compare as 1, matching the original behavior.
function reverse (a, b) {
  if (a > b) return -1
  return 1
}
+
// npm config ls: build the full config listing, one ini-style section
// per source (cli, env, userconfig, globalconfig, builtin), hiding
// secret (underscore-prefixed) keys and, with --long, also showing
// overridden values and all defaults.
function list (cb) {
  var msg = ""
    , long = npm.config.get("long")

  // cli configs.
  // show any that aren't secret
  var cli = ini.configList.list[ini.TRANS.cli]
    , eol = process.platform === "win32" ? "\r\n" : "\n"
    // NOTE(review): `types[k] !== types[k]` is a self-inequality (NaN)
    // check on the declared type — confirm which keys it's meant to drop.
    , cliKeys = Object.keys(cli).filter(function (k) {
        return !(k.charAt(0) === "_" || types[k] !== types[k])
      }).sort(function (a, b) {
        return a > b ? 1 : -1
      })
  if (cliKeys.length) {
    msg += "; cli configs" + eol
    cliKeys.forEach(function (k) {
      if (k === "argv") return
      msg += k + " = " + JSON.stringify(cli[k]) + eol
    })
    msg += eol
  }

  // env configs
  var env = ini.configList.list[ini.TRANS.env]
    , envKeys = Object.keys(env).filter(function (k) {
        return !(k.charAt(0) === "_" || types[k] !== types[k])
      }).sort(function (a, b) {
        return a > b ? 1 : -1
      })
  if (envKeys.length) {
    msg += "; environment configs" + eol
    envKeys.forEach(function (k) {
      // A value differing from the resolved config means something
      // higher-priority overrode it; only show that with --long.
      if (env[k] !== ini.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + JSON.stringify(env[k])
             + " (overridden)" + eol
      } else msg += k + " = " + JSON.stringify(env[k]) + eol
    })
    msg += eol
  }

  // user config file
  var uconf = ini.configList.list[ini.TRANS.user]
    , uconfKeys = Object.keys(uconf).filter(function (k) {
        return types[k] === types[k]
      }).sort(function (a, b) {
        return a > b ? 1 : -1
      })
  if (uconfKeys.length) {
    msg += "; userconfig " + ini.get("userconfig") + eol
    uconfKeys.forEach(function (k) {
      // Mask credential keys rather than dropping them from the section.
      var val = (k.charAt(0) === "_")
              ? "---sekretz---"
              : JSON.stringify(uconf[k])
      if (uconf[k] !== ini.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + val
             + " (overridden)" + eol
      } else msg += k + " = " + val + eol
    })
    msg += eol
  }

  // global config file
  var gconf = ini.configList.list[ini.TRANS.global]
    , gconfKeys = Object.keys(gconf).filter(function (k) {
        return types[k] === types[k]
      }).sort(function (a, b) {
        return a > b ? 1 : -1
      })
  if (gconfKeys.length) {
    msg += "; globalconfig " + ini.get("globalconfig") + eol
    gconfKeys.forEach(function (k) {
      var val = (k.charAt(0) === "_")
              ? "---sekretz---"
              : JSON.stringify(gconf[k])
      if (gconf[k] !== ini.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + val
             + " (overridden)" + eol
      } else msg += k + " = " + val + eol
    })
    msg += eol
  }

  // builtin config file
  var bconf = ini.configList.list[ini.TRANS.builtin]
    , bconfKeys = Object.keys(bconf).filter(function (k) {
        return types[k] === types[k]
      }).sort(function (a, b) {
        return a > b ? 1 : -1
      })
  if (bconfKeys.length) {
    var path = require("path")
    msg += "; builtin config " + path.resolve(__dirname, "../npmrc") + eol
    bconfKeys.forEach(function (k) {
      var val = (k.charAt(0) === "_")
              ? "---sekretz---"
              : JSON.stringify(bconf[k])
      if (bconf[k] !== ini.get(k)) {
        if (!long) return
        msg += "; " + k + " = " + val
             + " (overridden)" + eol
      } else msg += k + " = " + val + eol
    })
    msg += eol
  }

  // only show defaults if --long
  if (!long) {
    msg += "; node install prefix = " + process.installPrefix + eol
         + "; node bin location = " + process.execPath + eol
         + "; cwd = " + process.cwd() + eol
         + "; HOME = " + process.env.HOME + eol
         + "; 'npm config ls -l' to show all defaults." + eol

    return output.write(msg, cb)
  }

  var defaults = ini.defaultConfig
    , defKeys = Object.keys(defaults)
  msg += "; default values" + eol
  defKeys.forEach(function (k) {
    var val = JSON.stringify(defaults[k])
    if (defaults[k] !== ini.get(k)) {
      if (!long) return
      msg += "; " + k + " = " + val
           + " (overridden)" + eol
    } else msg += k + " = " + val + eol
  })
  msg += eol

  return output.write(msg, cb)
}
+
// Fallback for an unrecognized subcommand: fail with the usage text.
function unknown (action, cb) {
  var msg = "Usage:\n" + config.usage
  cb(msg)
}
diff --git a/deps/npm/lib/deprecate.js b/deps/npm/lib/deprecate.js
new file mode 100644
index 0000000000..6f18879e4d
--- /dev/null
+++ b/deps/npm/lib/deprecate.js
@@ -0,0 +1,48 @@

// npm deprecate: attach a warning message to published versions of a
// package so installs of those versions print it.
module.exports = deprecate

deprecate.usage = "npm deprecate <pkg>[@<version>] <message>"

// Tab-completion: offer the logged-in user's own packages (you can only
// deprecate what you own); nothing when not logged in.
deprecate.completion = function (opts, cb) {
  // first, get a list of remote packages this user owns.
  // once we have a user account, then don't complete anything.
  var un = npm.config.get("username")
  if (!npm.config.get("username")) return cb()
  if (opts.conf.argv.remain.length > 2) return cb()
  // get the list of packages by user
  var uri = "/-/by-user/"+encodeURIComponent(un)
  registry.get(uri, null, 60000, function (er, list) {
    // Completion is best-effort: swallow registry errors.
    if (er) return cb()
    console.error(list)
    return cb(null, list[un])
  })
}

// NOTE: these vars are hoisted, so deprecate.completion above can use
// them — they are assigned at module load, before any call.
var registry = require("./utils/npm-registry-client/index.js")
  , semver = require("semver")
  , log = require("./utils/log.js")
  , npm = require("./npm.js")
+
// Mark every published version of `pkg` matching the given semver range
// as deprecated, attaching `msg`, then PUT the updated doc back.
function deprecate (args, cb) {
  var pkg = args[0]
    , msg = args[1]
  // Both arguments are required.  (Fix: only msg was checked before, so
  // a missing pkg crashed on pkg.split below.)
  if (pkg === undefined || msg === undefined) {
    return cb(new Error(deprecate.usage))
  }
  // fetch the data and make sure it exists.
  pkg = pkg.split(/@/)
  var name = pkg.shift()
    , ver = pkg.join("@")
  if (semver.validRange(ver) === null) {
    return cb(new Error("invalid version range: "+ver))
  }
  registry.get(name, function (er, data) {
    if (er) return cb(er)
    // filter all the versions that match
    Object.keys(data.versions).filter(function (v) {
      return semver.satisfies(v, ver)
    }).forEach(function (v) {
      data.versions[v].deprecated = msg
    })
    // now update the doc on the registry
    registry.request.PUT(data._id, data, cb)
  })
}
diff --git a/deps/npm/lib/docs.js b/deps/npm/lib/docs.js
new file mode 100644
index 0000000000..de9f71c654
--- /dev/null
+++ b/deps/npm/lib/docs.js
@@ -0,0 +1,46 @@

// npm docs: open a package's documentation page in a browser.
module.exports = docs

docs.usage = "npm docs <pkgname>"

// Tab-completion: the registry's short package-name list.
// Best-effort — registry errors yield an empty list, not a failure.
docs.completion = function (opts, cb) {
  if (opts.conf.argv.remain.length > 2) return cb()
  registry.get("/-/short", null, 60000, function (er, list) {
    return cb(null, list || [])
  })
}

var exec = require("./utils/exec.js")
  , registry = require("./utils/npm-registry-client/index.js")
  , npm = require("./npm.js")
  , log = require("./utils/log.js")
+
// Open documentation for a package: its homepage if set, else a
// browser-friendly rewrite of its repository URL, else the npmjs
// search page for the package.
function docs (args, cb) {
  if (!args.length) return cb(docs.usage)
  var n = args[0].split("@").shift()
  registry.get(n, "latest", 3600, function (er, d) {
    if (er) return cb(er)
    var homepage = d.homepage
      , repo = d.repository || d.repositories
    if (homepage) return open(homepage, cb)
    if (repo) {
      if (Array.isArray(repo)) repo = repo.shift()
      if (repo.url) repo = repo.url
      log.verbose(repo, "repository")
      // Only a string repo can be rewritten into a browsable URL.
      // (Fix: a malformed repository object without .url previously
      // crashed on .replace; now it falls through to the search page.)
      if (typeof repo === "string") {
        return open(repo.replace(/^git(@|:\/\/)/, 'http://')
                        .replace(/\.git$/, '')+"#readme", cb)
      }
    }
    return open("http://search.npmjs.org/#/" + d.name, cb)
  })
}
+
// Launch the configured browser on a URL, wrapping failures in a hint
// about setting the 'browser' config.
function open (url, cb) {
  var hint = "Failed to open "+url+" in a browser. It could be that the\n"+
             "'browser' config is not set. Try doing this:\n"+
             " npm config set browser google-chrome\n"+
             "or:\n"+
             " npm config set browser lynx\n"
  exec(npm.config.get("browser"), [url], log.er(cb, hint))
}
diff --git a/deps/npm/lib/edit.js b/deps/npm/lib/edit.js
new file mode 100644
index 0000000000..df103d4df4
--- /dev/null
+++ b/deps/npm/lib/edit.js
@@ -0,0 +1,31 @@
// npm edit <pkg>[@<version>]
// open the package folder in the $EDITOR
// After editing, the package is rebuilt in case native parts changed.

module.exports = edit
edit.usage = "npm edit <pkg>"

// Tab-completion: packages installed directly under node_modules.
edit.completion = require("./utils/completion/installed-shallow.js")

var npm = require("./npm.js")
  , exec = require("./utils/exec.js")
  , path = require("path")
  , fs = require("graceful-fs")
  , log = require("./utils/log.js")
+
// npm edit <pkg>: open an installed package's folder in the configured
// editor, then rebuild it in case the edit changed anything compiled.
function edit (args, cb) {
  var p = args[0]
  if (args.length !== 1 || !p) return cb(edit.usage)
  var editor = npm.config.get("editor")
  if (!editor) return cb(new Error(
    "No editor set. Set the 'editor' config, or $EDITOR environ."))
  // Rewrite a/b/c into a/node_modules/b/node_modules/c, collapsing any
  // node_modules segments the user already typed.  (Fix: the /g flag is
  // needed so every repeated run collapses, not just the first.)
  p = p.split("/")
       .join("/node_modules/")
       .replace(/(\/node_modules)+/g, "/node_modules")
  fs.lstat(path.resolve(npm.dir, p), function (er) {
    if (er) return cb(er)
    exec(editor, [path.resolve(npm.dir, p)], function (er) {
      if (er) return cb(er)
      npm.commands.rebuild(args, cb)
    })
  })
}
diff --git a/deps/npm/lib/explore.js b/deps/npm/lib/explore.js
new file mode 100644
index 0000000000..8392c91728
--- /dev/null
+++ b/deps/npm/lib/explore.js
@@ -0,0 +1,34 @@
// npm explore <pkg>[@<version>]
// open a subshell to the package folder.
// With a trailing "-- <cmd>", run that command in the folder instead.

module.exports = explore
explore.usage = "npm explore <pkg> [ -- <cmd>]"
// Tab-completion: packages installed directly under node_modules.
explore.completion = require("./utils/completion/installed-shallow.js")

var npm = require("./npm.js")
  , exec = require("./utils/exec.js")
  , path = require("path")
  , fs = require("graceful-fs")
+
// npm explore <pkg> [-- cmd]: spawn the configured shell inside the
// package's folder — interactively, or running `cmd` via `sh -c`.
// (Fix: removed the unused `editor` local.)
function explore (args, cb) {
  if (args.length < 1 || !args[0]) return cb(explore.usage)
  var p = args.shift()
  args = args.join(" ").trim()
  // A non-empty remainder becomes a one-shot "-c <cmd>" invocation.
  if (args) args = ["-c", args]
  else args = []

  var cwd = path.resolve(npm.dir, p)
  fs.stat(cwd, function (er, s) {
    if (er || !s.isDirectory()) return cb(new Error(
      "It doesn't look like "+p+" is installed."))
    if (!args.length) console.log(
      "\nExploring "+cwd+"\n"+
      "Type 'exit' or ^D when finished\n")
    exec(npm.config.get("shell"), args, null, true, cwd, function (er) {
      // only fail if non-interactive.
      if (!args.length) return cb()
      cb(er)
    })
  })
}
diff --git a/deps/npm/lib/faq.js b/deps/npm/lib/faq.js
new file mode 100644
index 0000000000..912db0072f
--- /dev/null
+++ b/deps/npm/lib/faq.js
@@ -0,0 +1,8 @@

// npm faq: thin alias that shows the "faq" help page.
module.exports = faq

faq.usage = "npm faq"

var npm = require("./npm.js")

function faq (args, cb) { npm.commands.help(["faq"], cb) }
diff --git a/deps/npm/lib/get.js b/deps/npm/lib/get.js
new file mode 100644
index 0000000000..aa058002ec
--- /dev/null
+++ b/deps/npm/lib/get.js
@@ -0,0 +1,12 @@

// npm get: thin alias for `npm config get`.
module.exports = get

get.usage = "npm get <key> <value> (See `npm config`)"

var npm = require("./npm.js")

// Reuse config's completion.  NOTE(review): this reads
// npm.commands.config at module load — verify load order makes it
// available here.
get.completion = npm.commands.config.completion

function get (args, cb) {
  npm.commands.config(["get"].concat(args), cb)
}
diff --git a/deps/npm/lib/help-search.js b/deps/npm/lib/help-search.js
new file mode 100644
index 0000000000..a4ef667c08
--- /dev/null
+++ b/deps/npm/lib/help-search.js
@@ -0,0 +1,183 @@
+
+module.exports = helpSearch
+
+var fs = require("graceful-fs")
+ , output = require("./utils/output.js")
+ , path = require("path")
+ , asyncMap = require("slide").asyncMap
+ , cliDocsPath = path.join(__dirname, "..", "doc", "cli")
+ , apiDocsPath = path.join(__dirname, "..", "doc", "api")
+ , log = require("./utils/log.js")
+ , npm = require("./npm.js")
+
+helpSearch.usage = "npm help-search <text>"
+
+function helpSearch (args, silent, cb) {
+ if (typeof cb !== "function") cb = silent, silent = false
+ if (!args.length) return cb(helpSearch.usage)
+
+ // see if we're actually searching the api docs.
+ var argv = npm.config.get("argv").cooked
+ , docsPath = cliDocsPath
+ , cmd = "help"
+ if (argv.length && argv[0].indexOf("api") !== -1) {
+ docsPath = apiDocsPath
+ cmd = "apihelp"
+ }
+
+ fs.readdir(docsPath, function(er, files) {
+ if (er) return log.er(cb, "Could not load documentation")(er)
+
+ var search = args.join(" ")
+ , results = []
+ asyncMap(files, function (file, cb) {
+ fs.lstat(path.resolve(docsPath, file), function (er, st) {
+ if (er) return cb(er)
+ if (!st.isFile()) return cb(null, [])
+
+ fs.readFile(path.resolve(docsPath, file), "utf8", function (er, data) {
+ if (er) return cb(er)
+
+ var match = false
+ for (var a = 0, l = args.length; a < l && !match; a ++) {
+ match = data.toLowerCase().indexOf(args[a].toLowerCase()) !== -1
+ }
+ if (!match) return cb(null, [])
+
+ var lines = data.split(/\n+/)
+ , context = []
+
+ // if a line has a search term, then skip it and the next line.
+ // if the next line has a search term, then skip all 3
+ // otherwise, set the line to null.
+ for (var i = 0, l = lines.length; i < l; i ++) {
+ var line = lines[i]
+ , nextLine = lines[i + 1]
+ , match = false
+ if (nextLine) {
+ for (var a = 0, ll = args.length; a < ll && !match; a ++) {
+ match = nextLine.toLowerCase()
+ .indexOf(args[a].toLowerCase()) !== -1
+ }
+ if (match) {
+ // skip over the next line, and the line after it.
+ i += 2
+ continue
+ }
+ }
+
+ match = false
+ for (var a = 0, ll = args.length; a < ll && !match; a ++) {
+ match = line.toLowerCase().indexOf(args[a].toLowerCase()) !== -1
+ }
+ if (match) {
+ // skip over the next line
+ i ++
+ continue
+ }
+
+ lines[i] = null
+ }
+
+ // now squish any string of nulls into a single null
+ lines = lines.reduce(function (l, r) {
+ if (!(r === null && l[l.length-1] === null)) l.push(r)
+ return l
+ }, [])
+
+ if (lines[lines.length - 1] === null) lines.pop()
+ if (lines[0] === null) lines.shift()
+
+ // now see how many args were found at all.
+ var found = {}
+ , totalHits = 0
+ lines.forEach(function (line) {
+ args.forEach(function (arg) {
+ var hit = (line || "").toLowerCase()
+ .split(arg.toLowerCase()).length - 1
+ if (hit > 0) {
+ found[arg] = (found[arg] || 0) + hit
+ totalHits += hit
+ }
+ })
+ })
+
+ return cb(null, { file: file, lines: lines, found: Object.keys(found)
+ , hits: found, totalHits: totalHits })
+ })
+ })
+ }, function (er, results) {
+ if (er) return cb(er)
+
+ // if only one result, then just show that help section.
+ if (results.length === 1) {
+ return npm.commands.help([results[0].file.replace(/\.md$/, "")], cb)
+ }
+
+ if (results.length === 0) {
+ return output.write("No results for "
+ + args.map(JSON.stringify).join(" "), cb)
+ }
+
+ // sort results by number of results found, then by number of hits
+ // then by number of matching lines
+ results = results.sort(function (a, b) {
+ return a.found.length > b.found.length ? -1
+ : a.found.length < b.found.length ? 1
+ : a.totalHits > b.totalHits ? -1
+ : a.totalHits < b.totalHits ? 1
+ : a.lines.length > b.lines.length ? -1
+ : a.lines.length < b.lines.length ? 1
+ : 0
+ })
+
+ var out = results.map(function (res, i, results) {
+ var out = "npm " + cmd + " "+res.file.replace(/\.md$/, "")
+ , r = Object.keys(res.hits).map(function (k) {
+ return k + ":" + res.hits[k]
+ }).sort(function (a, b) {
+ return a > b ? 1 : -1
+ }).join(" ")
+
+ out += ((new Array(Math.max(1, 81 - out.length - r.length)))
+ .join (" ")) + r
+
+ if (!npm.config.get("long")) return out
+
+ var out = "\n\n" + out
+ + "\n" + (new Array(81)).join("—") + "\n"
+ + res.lines.map(function (line, i) {
+ if (line === null || i > 3) return ""
+ for (var out = line, a = 0, l = args.length; a < l; a ++) {
+ var finder = out.toLowerCase().split(args[a].toLowerCase())
+ , newOut = []
+ , p = 0
+ finder.forEach(function (f) {
+ newOut.push( out.substr(p, f.length)
+ , "\1"
+ , out.substr(p + f.length, args[a].length)
+ , "\2" )
+ p += f.length + args[a].length
+ })
+ out = newOut.join("")
+ }
+ out = out.split("\1").join("\033[31;40m")
+ .split("\2").join("\033[0m")
+ return out
+ }).join("\n").trim()
+ return out
+ }).join("\n")
+
+ if (results.length && !npm.config.get("long")) {
+ out = "Top hits for "+(args.map(JSON.stringify).join(" "))
+ + "\n" + (new Array(81)).join("—") + "\n"
+ + out
+ + "\n" + (new Array(81)).join("—") + "\n"
+ + "(run with -l or --long to see more context)"
+ }
+
+ output.write(out.trim(), function (er) { cb(er, results) })
+ })
+
+ })
+}
diff --git a/deps/npm/lib/help.js b/deps/npm/lib/help.js
new file mode 100644
index 0000000000..854a7d6bab
--- /dev/null
+++ b/deps/npm/lib/help.js
@@ -0,0 +1,150 @@
+
+module.exports = help
+
+help.completion = function (opts, cb) {
+ if (opts.conf.argv.remain.length > 2) return cb(null, [])
+ var num = 1
+ if (-1 !== opts.conf.argv.remain[1].indexOf("api")) num = 3
+ getSections(num, cb)
+}
+
+var fs = require("graceful-fs")
+ , path = require("path")
+ , exec = require("./utils/exec.js")
+ , npm = require("./npm.js")
+ , output = require("./utils/output.js")
+ , log = require("./utils/log.js")
+
+function help (args, cb) {
+ var num = 1
+ , argv = npm.config.get("argv").cooked
+ if (argv.length && -1 !== argv[0].indexOf("api")) {
+ num = 3
+ }
+
+ if (args.length > 1 && args[0]) {
+ return npm.commands["help-search"](args, num, cb)
+ }
+
+ var section = args[0]
+
+ if (section) {
+ if ( npm.config.get("usage")
+ && npm.commands[section]
+ && npm.commands[section].usage
+ ) {
+ npm.config.set("loglevel", "silent")
+ return output.write(npm.commands[section].usage, cb)
+ }
+ var sectionPath = path.join( __dirname, "..", "man", "man" + num
+ , section + "." + num)
+ , htmlPath = path.resolve( __dirname, "..", "html"
+ , num === 3 ? "api" : "doc"
+ , section+".html" )
+ return fs.stat
+ ( sectionPath
+ , function (e, o) {
+ if (e) return npm.commands["help-search"](args, cb)
+
+ var manpath = path.join(__dirname, "..", "man")
+ , env = {}
+ Object.keys(process.env).forEach(function (i) {
+ env[i] = process.env[i]
+ })
+ env.MANPATH = manpath
+ var viewer = npm.config.get("viewer")
+ switch (viewer) {
+ case "woman":
+ var a = ["-e", "(woman-find-file \"" + sectionPath + "\")"]
+ exec("emacsclient", a, env, true, cb)
+ break
+ case "browser":
+ var b = npm.config.get("browser")
+ if (!b) {
+ return cb(new Error("viewer=browser and no browser set."))
+ }
+ output.write("Opening HTML in default browser...", cb)
+ // windows is SO weird.
+ if (process.platform === "win32") {
+ exec("cmd", ["/c", htmlPath], env, false, function () {})
+ } else {
+ exec(b, [htmlPath], env, false, function () {})
+ }
+ break
+ default:
+ exec("man", [num, section], env, true, cb)
+ }
+ }
+ )
+ } else getSections(function (er, sections) {
+ if (er) return cb(er)
+ npm.config.set("loglevel", "silent")
+ output.write
+ ( ["\nUsage: npm <command>"
+ , ""
+ , "where <command> is one of:"
+ , npm.config.get("long") ? usages()
+ : " " + wrap(Object.keys(npm.commands))
+ , ""
+ , "npm <cmd> -h quick help on <cmd>"
+ , "npm -l display full usage info"
+ , "npm faq commonly asked questions"
+ , "npm help <term> search for help on <term>"
+ , "npm help npm involved overview"
+ , ""
+ , "Specify configs in the ini-formatted file:"
+ , " " + npm.config.get("userconfig")
+ , "or on the command line via: npm <command> --key value"
+ , "Config info can be viewed via: npm help config"
+ ].join("\n"), function () { cb(er) })
+ })
+}
+
+function usages () {
+ // return a string of <cmd>: <usage>
+ var maxLen = 0
+ return Object.keys(npm.commands).filter(function (c) {
+ return c === npm.deref(c)
+ }).reduce(function (set, c) {
+ set.push([c, npm.commands[c].usage || ""])
+ maxLen = Math.max(maxLen, c.length)
+ return set
+ }, []).map(function (item) {
+ var c = item[0]
+ , usage = item[1]
+ return "\n " + c + (new Array(maxLen - c.length + 2).join(" "))
+ + (usage.split("\n")
+ .join("\n" + (new Array(maxLen + 6).join(" "))))
+ }).join("\n")
+ return out
+}
+
+
+function wrap (arr) {
+ var out = ['']
+ , l = 0
+ arr.sort(function (a,b) { return a<b?-1:1 })
+ .forEach(function (c) {
+ if (out[l].length + c.length + 2 < 60) {
+ out[l] += ', '+c
+ } else {
+ out[l++] += ','
+ out[l] = c
+ }
+ })
+ return out.join("\n ").substr(2)
+}
+
+function getSections (num, cb) {
+ if (typeof cb !== "function") cb = num, num = 1
+
+ var mp = path.join(__dirname, "../man/man" + num + "/")
+ , cleaner = new RegExp("\\." + num + "$")
+ fs.readdir(mp, function (er, files) {
+ if (er) return cb(er)
+ var sectionList = files.concat("help." + num)
+ .filter(function (s) { return s.match(cleaner) })
+ .map(function (s) { return s.replace(cleaner, "")})
+ cb(null, sectionList)
+ })
+}
diff --git a/deps/npm/lib/init.js b/deps/npm/lib/init.js
new file mode 100644
index 0000000000..950cb45885
--- /dev/null
+++ b/deps/npm/lib/init.js
@@ -0,0 +1,240 @@
+
+// initialize a package.json file
+
+module.exports = init
+
+var prompt = require("./utils/prompt.js")
+ , path = require("path")
+ , readJson = require("./utils/read-json.js")
+ , fs = require("graceful-fs")
+ , promiseChain = require("./utils/promise-chain.js")
+ , exec = require("./utils/exec.js")
+ , semver = require("semver")
+ , log = require("./utils/log.js")
+ , npm = require("./npm.js")
+ , output = require("./utils/output.js")
+
+init.usage = "npm init [folder]"
+
+function init (args, cb) {
+ var folder = args[0] || "."
+ , ll = npm.config.get("loglevel")
+ npm.config.set("loglevel", "paused")
+ if (folder.charAt(0) !== "/") folder = path.join(process.cwd(), folder)
+
+ readJson(path.join(folder, "package.json"), function (er, data) {
+ if (er) data = {}
+
+ data.author = data.author ||
+ { name: npm.config.get("init.author.name")
+ , email: npm.config.get("init.author.email")
+ , url: npm.config.get("init.author.url") }
+
+ init_(data, folder, function (er) {
+ npm.config.set("loglevel", ll)
+ if (!er) log(path.resolve(folder, "package.json"), "written")
+ cb(er)
+ })
+ })
+}
+
+function init_ (data, folder, cb) {
+ var nv = npm.config.get("node-version")
+ , p = semver.parse(nv)
+ , eng = ""
+
+ if (!p[5]) eng = "~" + nv
+ else eng = "~" + [p[1], p[2], p[3]].join(".") + " || " + nv
+
+ // node version 0.n is api-compatible with 0.(n+1) when n is odd.
+ if (p[2] % 2) {
+ eng += " || " + [p[1], +(p[2]) + 1].join(".")
+ }
+
+
+ output.write(
+ ["This utility will walk you through creating a package.json file."
+ ,"It only covers the most common items, and tries to guess sane defaults."
+ ,""
+ ,"See `npm help json` for definitive documentation on these fields"
+ ,"and exactly what they do."
+ ,""
+ ,"Use `npm install <pkg> --save` afterwards to install a package and"
+ ,"save it as a dependency in the package.json file."
+ ,""
+ ,"Press ^C at any time to quit."
+ ,""
+ ].join("\n"))
+ promiseChain(cb)
+ ( prompt
+ , ["Package name: ", defaultName(folder, data)]
+ , function (n) { data.name = n }
+ )
+ ( prompt
+ , ["Description: ", data.description]
+ , function (d) { data.description = d }
+ )
+ ( defaultVersion, [folder, data], function (v) { data.version = v } )
+ (function (cb) {
+ prompt("Package version: ", data.version, function (er, v) {
+ if (er) return cb(er)
+ data.version = v
+ cb()
+ })
+ }, [])
+ ( prompt
+ , ["Project homepage: ", data.homepage || data.url || "none"]
+ , function (u) {
+ if (u === "none") return
+ data.homepage = u
+ delete data.url
+ }
+ )
+ ( defaultRepo, [folder, data], function (r) { data.repository = r } )
+ (function (cb) {
+ prompt( "Project git repository: "
+ , data.repository && data.repository.url || "none"
+ , function (er, r) {
+ if (er) return cb(er)
+ if (r !== "none") {
+ data.repository = (data.repository || {}).url = r
+ }
+ cb()
+ }
+ )
+ }, [])
+ ( prompt
+ , ["Author name: ", data.author && data.author.name]
+ , function (n) {
+ if (!n) return
+ (data.author = data.author || {}).name = n
+ }
+ )
+ ( prompt
+ , ["Author email: ", data.author && data.author.email || "none"]
+ , function (n) {
+ if (n === "none") return
+ (data.author = data.author || {}).email = n
+ }
+ )
+ ( prompt
+ , ["Author url: ", data.author && data.author.url || "none"]
+ , function (n) {
+ if (n === "none") return
+ (data.author = data.author || {}).url = n
+ }
+ )
+ ( prompt
+ , ["Main module/entry point: ", data.main || "none"]
+ , function (m) {
+ if (m === "none") {
+ delete data.main
+ return
+ }
+ data.main = m
+ }
+ )
+ ( prompt
+ , ["Test command: ", data.scripts && data.scripts.test || "none"]
+ , function (t) {
+ if (t === "none") return
+ (data.scripts = data.scripts || {}).test = t
+ }
+ )
+ ( prompt
+ , [ "What versions of node does it run on? "
+ , data.engines && data.engines.node
+ || (eng)
+ ]
+ , function (nodever) {
+ (data.engines = data.engines || {}).node = nodever
+ }
+ )
+ (cleanupPaths, [data, folder])
+ (function (cb) {
+ try { data = readJson.processJson(data) }
+ catch (er) { return cb(er) }
+ Object.keys(data)
+ .filter(function (k) { return k.match(/^_/) })
+ .forEach(function (k) { delete data[k] })
+ readJson.unParsePeople(data)
+ var str = JSON.stringify(data, null, 2)
+ , msg = "About to write to "
+ + path.join(folder, "package.json")
+ + "\n\n"
+ + str
+ + "\n\n"
+ output.write(msg, cb)
+ })
+ (function (cb) {
+ prompt("\nIs this ok? ", "yes", function (er, ok) {
+ if (er) return cb(er)
+ if (ok.toLowerCase().charAt(0) !== "y") {
+ return cb(new Error("cancelled"))
+ }
+ return cb()
+ })
+ })
+ (function (cb) {
+ fs.writeFile( path.join(folder, "package.json")
+ , JSON.stringify(data, null, 2) + "\n"
+ , cb )
+ })
+ ()
+}
+
+// sync - no io
+function defaultName (folder, data) {
+ if (data.name) return data.name
+ return path.basename(folder)
+ .replace(/^node[-\._]?|([-\._]node)[-\._]?(js)?$/g, "")
+}
+
+function defaultVersion (folder, data, cb) {
+ if (data.version) return cb(null, data.version)
+ exec("git", ["describe", "--tags"], process.env, false, folder,
+ function (er, code, out) {
+ out = (out || "").trim()
+ if (semver.valid(out)) return cb(null, out)
+ out = npm.config.get("init.version")
+ if (semver.valid(out)) return cb(null, out)
+ return cb(null, "0.0.0")
+ })
+}
+
+function defaultRepo (folder, data, cb) {
+ if (data.repository) return cb(null, data.repository)
+ exec( "git", ["remote", "-v"], process.env, false, folder
+ , function (er, code, out) {
+ out = (out || "")
+ .trim()
+ .split("\n").filter(function (line) {
+ return line.search(/^origin/) !== -1
+ })[0]
+ if (!out) return cb(null, {})
+ var repo =
+ { type: "git"
+ , url: out.split(/\s/)[1]
+ .replace("git@github.com:", "git://github.com/")
+ }
+ return cb(null, repo)
+ })
+}
+
+function cleanupPaths (data, folder, cb) {
+ if (data.main) {
+ data.main = cleanupPath(data.main, folder)
+ }
+ var dirs = data.directories
+ if (dirs) {
+ Object.keys(dirs).forEach(function (dir) {
+ dirs[dir] = cleanupPath(dirs[dir], folder)
+ })
+ }
+ cb()
+}
+
+function cleanupPath (m, folder) {
+ if (m.indexOf(folder) === 0) m = path.join(".", m.substr(folder.length))
+ return m
+}
diff --git a/deps/npm/lib/install.js b/deps/npm/lib/install.js
new file mode 100644
index 0000000000..4d71b9a512
--- /dev/null
+++ b/deps/npm/lib/install.js
@@ -0,0 +1,608 @@
+
+// npm install <pkg> <pkg> <pkg>
+//
+// See doc/install.md for more description
+
+// Managing "family" lists...
+// every time we dive into a deeper node_modules folder, the "family"
+// list that gets passed along uses the previous "family" list as
+// its __proto__. Any "resolved precise dependency" things that aren't
+// already on this object get added, and then that's passed to the next
+// generation of installation.
+
+module.exports = install
+
+install.usage = "npm install <tarball file>"
+ + "\nnpm install <tarball url>"
+ + "\nnpm install <folder>"
+ + "\nnpm install <pkg>"
+ + "\nnpm install <pkg>@<tag>"
+ + "\nnpm install <pkg>@<version>"
+ + "\nnpm install <pkg>@<version range>"
+ + "\n\nCan specify one or more: npm install ./foo.tgz bar@stable /some/folder"
+ + "\nInstalls dependencies in ./package.json if no argument supplied"
+
+install.completion = function (opts, cb) {
+ // install can complete to a folder with a package.json, or any package.
+ // if it has a slash, then it's gotta be a folder
+ // if it starts with https?://, then just give up, because it's a url
+ // for now, not yet implemented.
+ var registry = require("./utils/npm-registry-client/index.js")
+ registry.get("/-/short", function (er, pkgs) {
+ if (er) return cb()
+ if (!opts.partialWord) return cb(null, pkgs)
+
+ var name = opts.partialWord.split("@").shift()
+ pkgs = pkgs.filter(function (p) {
+ return p.indexOf(name) === 0
+ })
+
+ if (pkgs.length !== 1 && opts.partialWord === name) {
+ return cb(null, pkgs)
+ }
+
+ registry.get(pkgs[0], function (er, d) {
+ if (er) return cb()
+ return cb(null, Object.keys(d["dist-tags"] || {})
+ .concat(Object.keys(d.versions || {}))
+ .map(function (t) {
+ return pkgs[0] + "@" + t
+ }))
+ })
+ })
+}
+
+var npm = require("./npm.js")
+ , semver = require("semver")
+ , readJson = require("./utils/read-json.js")
+ , log = require("./utils/log.js")
+ , path = require("path")
+ , fs = require("graceful-fs")
+ , cache = require("./cache.js")
+ , asyncMap = require("slide").asyncMap
+ , chain = require("slide").chain
+ , relativize = require("./utils/relativize.js")
+ , output
+ , url = require("url")
+ , mkdir = require("./utils/mkdir-p.js")
+ , lifecycle = require("./utils/lifecycle.js")
+
+function install (args, cb_) {
+
+ function cb (er, installed) {
+ if (er) return cb_(er)
+
+ output = output || require("./utils/output.js")
+
+ var tree = treeify(installed)
+ , pretty = prettify(tree, installed)
+
+ output.write(pretty, function (er) {
+ if (er) return cb_(er)
+ save(where, installed, tree, pretty, cb_)
+ })
+ }
+
+ // the /path/to/node_modules/..
+ var where = path.resolve(npm.dir, "..")
+
+ // internal api: install(where, what, cb)
+ if (arguments.length === 3) {
+ where = args
+ args = [].concat(cb_) // pass in [] to do default dep-install
+ cb_ = arguments[2]
+ log.verbose([where, args], "install(where, what)")
+ }
+
+ if (!npm.config.get("global")) {
+ args = args.filter(function (a) {
+ return path.resolve(a) !== where
+ })
+ }
+
+ mkdir(where, function (er) {
+ if (er) return cb(er)
+ // install dependencies locally by default,
+ // or install current folder globally
+ if (!args.length) {
+ if (npm.config.get("global")) args = ["."]
+ else return readJson( path.resolve(where, "package.json")
+ , { dev: !npm.config.get("production") }
+ , function (er, data) {
+ if (er) return log.er(cb, "Couldn't read dependencies.")(er)
+ var deps = Object.keys(data.dependencies || {})
+ log.verbose([where, deps], "where, deps")
+ var family = {}
+ , ancestors = {}
+ family[data.name] = ancestors[data.name] = data.version
+ installManyTop(deps.map(function (dep) {
+ var target = data.dependencies[dep]
+ , parsed = url.parse(target.replace(/^git\+/, "git"))
+ if (!parsed.protocol) {
+ target = dep + "@" + target
+ }
+ return target
+ }), where, family, ancestors, false, cb)
+ })
+ }
+
+ // initial "family" is the name:version of the root, if it's got
+  // a package.json file.
+ readJson(path.resolve(where, "package.json"), function (er, data) {
+ if (er) data = null
+ var family = {}
+ , ancestors = {}
+ if (data) family[data.name] = ancestors[data.name] = data.version
+ var fn = npm.config.get("global") ? installMany : installManyTop
+ fn(args, where, family, ancestors, true, cb)
+ })
+ })
+}
+
+// if the -S|--save option is specified, then write installed packages
+// as dependencies to a package.json file.
+// This is experimental.
+function save (where, installed, tree, pretty, cb) {
+ if (!npm.config.get("save") || npm.config.get("global")) {
+ return cb(null, installed, tree, pretty)
+ }
+ // each item in the tree is a top-level thing that should be saved
+ // to the package.json file.
+ // The relevant tree shape is { <folder>: {what:<pkg>} }
+ var saveTarget = path.resolve(where, "package.json")
+ , things = Object.keys(tree).map(function (k) {
+ //log.warn(k, "k")
+ return tree[k].what.split("@")
+ }).reduce(function (set, k) {
+ var rangeDescriptor = semver.gte(k[1], "0.1.0") ? "~" : ""
+ set[k[0]] = rangeDescriptor + k[1]
+ return set
+ }, {})
+
+ //log.warn(things, "things")
+
+ // don't use readJson, because we don't want to do all the other
+ // tricky npm-specific stuff that's in there.
+ fs.readFile(saveTarget, function (er, data) {
+ // ignore errors here, just don't save it.
+ try {
+ data = JSON.parse(data.toString("utf8"))
+ } catch (ex) {
+ er = ex
+ }
+ if (er) return cb(null, installed, tree, pretty)
+
+ var deps = npm.config.get("dev") ? "devDependencies" : "dependencies"
+ deps = data[deps] = data[deps] || {}
+
+ Object.keys(things).forEach(function (t) {
+ deps[t] = things[t]
+ })
+ data = JSON.stringify(data, null, 2) + "\n"
+ fs.writeFile(saveTarget, data, function (er) {
+ cb(er, installed, tree, pretty)
+ })
+ })
+}
+
+
+// Outputting *all* the installed modules is a bit confusing,
+// because the length of the path does not make it clear
+// that the submodules are not immediately require()able.
+// TODO: Show the complete tree, ls-style.
+function prettify (tree, installed) {
+ if (npm.config.get("parseable")) return parseable(installed)
+ return Object.keys(tree).map(function (p) {
+ p = tree[p]
+ var c = ""
+ if (p.children && p.children.length) {
+ pref = "\n"
+ var l = p.children.pop()
+ c = p.children.map(function (c) {
+ var gc = c.children && c.children.length
+ ? " (" + c.children.map(function (gc) {
+ return gc.what
+ }).join(" ") + ")"
+ : ""
+ return "\n├── " + c.what + gc
+ }).join("") + "\n└── " + l.what
+ }
+ return [p.what, p.where, c].join(" ")
+
+ }).join("\n")
+}
+
+function parseable (installed) {
+ var long = npm.config.get("long")
+ , cwd = process.cwd()
+ return installed.map(function (item) {
+ return path.resolve(cwd, item[1]) +
+ ( long ? ":" + item[0] : "" )
+ }).join("\n")
+}
+
+function treeify (installed) {
+ // each item is [what, where, parent, parentDir]
+ // If no parent, then report it.
+ // otherwise, tack it into the parent's children list.
+ // If the parent isn't a top-level then ignore it.
+ var whatWhere = installed.reduce(function (l, r) {
+ var parentDir = r[3]
+ , parent = r[2]
+ , where = r[1]
+ , what = r[0]
+ l[where] = { parentDir: parentDir
+ , parent: parent
+ , children: []
+ , where: where
+ , what: what }
+ return l
+ }, {})
+
+ //log.warn(whatWhere, "whatWhere")
+ return Object.keys(whatWhere).reduce(function (l, r) {
+ var ww = whatWhere[r]
+ //log.warn([r, ww], "r, ww")
+ if (!ww.parent) {
+ l[r] = ww
+ } else {
+ var p = whatWhere[ww.parentDir]
+ if (p) p.children.push(ww)
+ else l[r] = ww
+ }
+ return l
+ }, {})
+}
+
+
+// just like installMany, but also add the existing packages in
+// where/node_modules to the family object.
+function installManyTop (what, where, family, ancestors, explicit, cb_) {
+
+ function cb (er, d) {
+ if (explicit || er) return cb_(er, d)
+ // since this wasn't an explicit install, let's build the top
+ // folder, so that `npm install` also runs the lifecycle scripts.
+ npm.commands.build([where], false, true, function (er) {
+ return cb_(er, d)
+ })
+ }
+
+ if (explicit) return next()
+
+ readJson(path.join(where, "package.json"), function (er, data) {
+ if (er) return next(er)
+ lifecycle(data, "preinstall", where, next)
+ })
+
+ function next (er) {
+ if (er) return cb(er)
+ installManyTop_(what, where, family, ancestors, explicit, cb)
+ }
+}
+
+function installManyTop_ (what, where, family, ancestors, explicit, cb) {
+ var nm = path.resolve(where, "node_modules")
+ , names = explicit
+ ? what.map(function (w) { return w.split(/@/).shift() })
+ : []
+
+ fs.readdir(nm, function (er, pkgs) {
+ if (er) return installMany(what, where, family, ancestors, explicit, cb)
+ pkgs = pkgs.filter(function (p) {
+ return !p.match(/^[\._-]/)
+ && (!explicit || names.indexOf(p) === -1)
+ })
+ asyncMap(pkgs.map(function (p) {
+ return path.resolve(nm, p, "package.json")
+ }), function (jsonfile, cb) {
+ readJson(jsonfile, function (er, data) {
+ if (er) return cb(null, [])
+ return cb(null, [[data.name, data.version]])
+ })
+ }, function (er, packages) {
+ // add all the existing packages to the family list.
+ // however, do not add to the ancestors list.
+ packages.forEach(function (p) {
+ family[p[0]] = p[1]
+ })
+ return installMany(what, where, family, ancestors, explicit, cb)
+ })
+ })
+}
+
+function installMany (what, where, family, ancestors, explicit, cb) {
+ // 'npm install foo' should install the version of foo
+ // that satisfies the dep in the current folder.
+ // This will typically return immediately, since we already read
+ // this file family, and it'll be cached.
+ readJson(path.resolve(where, "package.json"), function (er, data) {
+ if (er) data = {}
+
+ d = data.dependencies || {}
+ var parent = data._id
+
+ log.verbose(what, "into "+where)
+ // what is a list of things.
+ // resolve each one.
+ asyncMap( what
+ , targetResolver(where, family, ancestors, explicit, d)
+ , function (er, targets) {
+ if (er) return cb(er)
+ // each target will be a data object corresponding
+ // to a package, folder, or whatever that is in the cache now.
+ var newPrev = Object.create(family)
+ , newAnc = Object.create(ancestors)
+
+ newAnc[data.name] = data.version
+ targets.forEach(function (t) {
+ newPrev[t.name] = t.version
+ })
+ log.silly(targets, "resolved")
+ targets.filter(function (t) { return t }).forEach(function (t) {
+ log.info(t._id, "into "+where)
+ })
+ asyncMap(targets, function (target, cb) {
+ log(target._id, "installOne")
+ installOne(target, where, newPrev, newAnc, parent, cb)
+ }, cb)
+ })
+ })
+}
+
+function targetResolver (where, family, ancestors, explicit, deps) {
+ var alreadyInstalledManually = explicit ? [] : null
+ , nm = path.resolve(where, "node_modules")
+
+ if (!explicit) fs.readdir(nm, function (er, inst) {
+ if (er) return alreadyInstalledManually = []
+ asyncMap(inst, function (pkg, cb) {
+ readJson(path.resolve(nm, pkg, "package.json"), function (er, d) {
+ if (er) return cb(null, [])
+ if (semver.satisfies(d.version, deps[d.name] || "*")) {
+ return cb(null, d.name)
+ }
+ return cb(null, [])
+ })
+ }, function (er, inst) {
+ // this is the list of things that are valid and should be ignored.
+ alreadyInstalledManually = inst
+ })
+ })
+
+ var to = 0
+ return function resolver (what, cb) {
+ if (!alreadyInstalledManually) return setTimeout(function () {
+ resolver(what, cb)
+ }, to++)
+ // now we know what's been installed here manually,
+ // or tampered with in some way that npm doesn't want to overwrite.
+ if (alreadyInstalledManually.indexOf(what.split("@").shift()) !== -1) {
+ log.verbose("skipping "+what, "already installed in "+where)
+ return cb(null, [])
+ }
+
+ if (family[what] && semver.satisfies(family[what], deps[what] || "")) {
+ return cb(null, [])
+ }
+
+ if (deps[what]) {
+ what = what + "@" + deps[what]
+ }
+ log.verbose(what, "cache add")
+ cache.add(what, function (er, data) {
+ if (!er && data && family[data.name] === data.version) {
+ return cb(null, [])
+ }
+ return cb(er, data)
+ })
+ }
+}
+
+// we've already decided to install this. if anything's in the way,
+// then uninstall it first.
+function installOne (target, where, family, ancestors, parent, cb) {
+ // the --link flag makes this a "link" command if it's at the
+ // the top level.
+ if (where === npm.prefix && npm.config.get("link")
+ && !npm.config.get("global")) {
+ return localLink(target, where, family, ancestors, parent, cb)
+ }
+ installOne_(target, where, family, ancestors, parent, cb)
+}
+
+function localLink (target, where, family, ancestors, parent, cb) {
+ log.verbose(target._id, "try to link")
+ var jsonFile = path.resolve( npm.dir, target.name
+ , "package.json" )
+
+ readJson(jsonFile, function (er, data) {
+ if (er || data._id === target._id) {
+ if (er) {
+ install( path.resolve(npm.globalDir, "..")
+ , target._id
+ , function (er) {
+ if (er) return cb(er, [])
+ thenLink()
+ })
+ } else thenLink()
+
+ function thenLink () {
+ npm.commands.link([target.name], function (er, d) {
+ log.silly([er, d], "back from link")
+ cb(er, [resultList(target, where, parent)])
+ })
+ }
+
+ } else {
+ log.verbose(target._id, "install locally (no link)")
+ installOne_(target, where, family, ancestors, parent, cb)
+ }
+ })
+}
+
+function resultList (target, where, parent) {
+ var nm = path.resolve(where, "node_modules")
+ , targetFolder = path.resolve(nm, target.name)
+ , prettyWhere = relativize(where, process.cwd() + "/x")
+
+ if (prettyWhere === ".") prettyWhere = null
+
+ if (!npm.config.get("global")) {
+ // print out the folder relative to where we are right now.
+ // relativize isn't really made for dirs, so you need this hack
+ targetFolder = relativize(targetFolder, process.cwd()+"/x")
+ }
+
+ return [ target._id
+ , targetFolder
+ , prettyWhere && parent
+ , parent && prettyWhere ]
+}
+
+function installOne_ (target, where, family, ancestors, parent, cb) {
+ var nm = path.resolve(where, "node_modules")
+ , targetFolder = path.resolve(nm, target.name)
+ , prettyWhere = relativize(where, process.cwd() + "/x")
+
+ if (prettyWhere === ".") prettyWhere = null
+
+ chain
+ ( [ [checkEngine, target]
+ , [checkCycle, target, ancestors]
+ , [checkGit, targetFolder]
+ , [write, target, targetFolder, family, ancestors] ]
+ , function (er, d) {
+ log.verbose(target._id, "installOne cb")
+ if (er) return cb(er)
+
+ d.push(resultList(target, where, parent))
+ cb(er, d)
+ }
+ )
+}
+
+function checkEngine (target, cb) {
+ var npmv = npm.version
+ , force = npm.config.get("force")
+ , nodev = force ? null : npm.config.get("node-version")
+ , eng = target.engines
+ if (!eng) return cb()
+ if (nodev && eng.node && !semver.satisfies(nodev, eng.node)
+ || eng.npm && !semver.satisfies(npmv, eng.npm)) {
+ var er = new Error("Unsupported")
+ er.errno = npm.ENOTSUP
+ er.required = eng
+ er.pkgid = target._id
+ return cb(er)
+ }
+ return cb()
+}
+
+
+function checkCycle (target, ancestors, cb) {
+ // there are some very rare and pathological edge-cases where
+ // a cycle can cause npm to try to install a never-ending tree
+ // of stuff.
+ // Simplest:
+ //
+ // A -> B -> A' -> B' -> A -> B -> A' -> B' -> A -> ...
+ //
+ // Solution: Simply flat-out refuse to install any name@version
+ // that is already in the prototype tree of the ancestors object.
+ // A more correct, but more complex, solution would be to symlink
+ // the deeper thing into the new location.
+ // Will do that if anyone whines about this irl.
+
+ var p = Object.getPrototypeOf(ancestors)
+ , name = target.name
+ , version = target.version
+ while (p && p !== Object.prototype && p[name] !== version) {
+ p = Object.getPrototypeOf(p)
+ }
+ if (p[name] !== version) return cb()
+
+ var er = new Error("Unresolvable cycle detected")
+ var tree = [target._id, JSON.parse(JSON.stringify(ancestors))]
+ , t = Object.getPrototypeOf(ancestors)
+ while (t && t !== Object.prototype) {
+ if (t === p) t.THIS_IS_P = true
+ tree.push(JSON.parse(JSON.stringify(t)))
+ t = Object.getPrototypeOf(t)
+ }
+ log.verbose(tree, "unresolvable dependency tree")
+ er.pkgid = target._id
+ er.errno = npm.ECYCLE
+ return cb(er)
+}
+
+function checkGit (folder, cb) {
+ // if it's a git repo then don't touch it!
+ fs.lstat(folder, function (er, s) {
+ if (er || !s.isDirectory()) return cb()
+ else checkGit_(folder, cb)
+ })
+}
+
+function checkGit_ (folder, cb) {
+ fs.stat(path.resolve(folder, ".git"), function (er, s) {
+ if (!er && s.isDirectory()) {
+ var e = new Error("Appears to be a git repo or submodule.")
+ e.path = folder
+ e.errno = npm.EISGIT
+ return cb(e)
+ }
+ cb()
+ })
+}
+
+function write (target, targetFolder, family, ancestors, cb_) {
+ var up = npm.config.get("unsafe-perm")
+ , user = up ? null : npm.config.get("user")
+ , group = up ? null : npm.config.get("group")
+
+ function cb (er, data) {
+ // cache.unpack returns the data object, and all we care about
+ // is the list of installed packages from that last thing.
+ if (!er) return cb_(er, data)
+ log.error(er, "error installing "+target._id)
+ if (false === npm.config.get("rollback")) return cb_(er)
+ npm.commands.unbuild([targetFolder], function (er2) {
+ if (er2) log.error(er2, "error rolling back "+target._id)
+ return cb_(er, data)
+ })
+ }
+
+ chain
+ ( [ [ npm.commands.unbuild, [targetFolder] ]
+ , [ cache.unpack, target.name, target.version, targetFolder
+ , null, null, user, group ]
+ , [ lifecycle, target, "preinstall", targetFolder ] ]
+
+ // nest the chain so that we can throw away the results returned
+ // up until this point, since we really don't care about it.
+ , function (er) {
+ if (er) return cb(er)
+ var deps = Object.keys(target.dependencies || {})
+ installMany(deps.filter(function (d) {
+ // prefer to not install things that are satisfied by
+ // something in the "family" list.
+ return !semver.satisfies(family[d], target.dependencies[d])
+ }).map(function (d) {
+ var t = target.dependencies[d]
+ , parsed = url.parse(t.replace(/^git\+/, "git"))
+ if (!parsed.protocol) {
+ t = d + "@" + t
+ }
+ return t
+ }), targetFolder, family, ancestors, false, function (er, d) {
+ //log.warn(d, "write installMany cb")
+ log.verbose(targetFolder, "about to build")
+ if (er) return cb(er)
+ npm.commands.build( [targetFolder]
+ , npm.config.get("global")
+ , true
+ , function (er) { return cb(er, d) })
+ })
+ } )
+}
diff --git a/deps/npm/lib/link.js b/deps/npm/lib/link.js
new file mode 100644
index 0000000000..fea6606666
--- /dev/null
+++ b/deps/npm/lib/link.js
@@ -0,0 +1,158 @@
+
+// link with no args: symlink the folder to the global location
+// link with package arg: symlink the global to the local
+
+var npm = require("./npm.js")
+ , symlink = require("./utils/link.js")
+ , fs = require("graceful-fs")
+ , log = require("./utils/log.js")
+ , asyncMap = require("slide").asyncMap
+ , chain = require("slide").chain
+ , path = require("path")
+ , relativize = require("./utils/relativize.js")
+ , rm = require("rimraf")
+ , output = require("./utils/output.js")
+ , build = require("./build.js")
+
+module.exports = link
+
+link.usage = "npm link (in package dir)"
+ + "\nnpm link <pkg> (link global into local)"
+
+link.completion = function (opts, cb) {
+ var dir = npm.globalDir
+ fs.readdir(dir, function (er, files) {
+ cb(er, files.filter(function (f) {
+ return f.charAt(0) !== "."
+ }))
+ })
+}
+
+function link (args, cb) {
+ if (process.platform === "win32") {
+ var e = new Error("npm link not supported on windows")
+ e.code = "ENOTSUP"
+ e.errno = require("constants").ENOTSUP
+ return cb(e)
+ }
+
+ if (npm.config.get("global")) {
+ return cb(new Error("link should never be --global.\n"
+ +"Please re-run this command with --local"))
+ }
+ if (args.length === 1 && args[0] === ".") args = []
+ if (args.length) return linkInstall(args, cb)
+ linkPkg(npm.prefix, cb)
+}
+
+function linkInstall (pkgs, cb) {
+ asyncMap(pkgs, function (pkg, cb) {
+ function n (er, data) {
+ if (er) return cb(er, data)
+ // install returns [ [folder, pkgId], ... ]
+ // but we definitely installed just one thing.
+ var d = data.filter(function (d) { return !d[3] })
+ pp = d[0][1]
+ pkg = path.basename(pp)
+ target = path.resolve(npm.dir, pkg)
+ next()
+ }
+
+ var t = path.resolve(npm.globalDir, "..")
+ , pp = path.resolve(npm.globalDir, pkg)
+ , rp = null
+ , target = path.resolve(npm.dir, pkg)
+
+ // if it's a folder or a random not-installed thing, then
+ // link or install it first
+ if (pkg.indexOf("/") !== -1 || pkg.indexOf("\\") !== -1) {
+ return fs.lstat(path.resolve(pkg), function (er, st) {
+ if (er || !st.isDirectory()) {
+ npm.commands.install(t, pkg, n)
+ } else {
+ rp = path.resolve(pkg)
+ linkPkg(rp, n)
+ }
+ })
+ }
+
+ fs.lstat(pp, function (er, st) {
+ if (er) {
+ rp = pp
+ return npm.commands.install(t, pkg, n)
+ } else if (!st.isSymbolicLink()) {
+ rp = pp
+ next()
+ } else {
+ return fs.realpath(pp, function (er, real) {
+ if (er) log.warn(pkg, "invalid symbolic link")
+ else rp = real
+ next()
+ })
+ }
+ })
+
+ function next () {
+ chain
+ ( [ [npm.commands, "unbuild", [target]]
+ , [log.verbose, "symlinking " + pp + " to "+target, "link"]
+ , [symlink, pp, target]
+ // do run lifecycle scripts - full build here.
+ , rp && [build, [target]]
+ , [ resultPrinter, pkg, pp, target, rp ] ]
+ , cb )
+ }
+ }, cb)
+}
+
+function linkPkg (folder, cb_) {
+ var me = folder || npm.prefix
+ , readJson = require("./utils/read-json.js")
+ readJson( path.resolve(me, "package.json")
+ , { dev: true }
+ , function (er, d) {
+ function cb (er) {
+ return cb_(er, [[d && d._id, target, null, null]])
+ }
+ if (er) return cb(er)
+ var target = path.resolve(npm.globalDir, d.name)
+ rm(target, function (er) {
+ if (er) return cb(er)
+ symlink(me, target, function (er) {
+ if (er) return cb(er)
+ log.verbose(target, "link: build target")
+ // also install missing dependencies.
+ npm.commands.install(me, [], function (er, installed) {
+ if (er) return cb(er)
+ // build the global stuff. Don't run *any* scripts, because
+ // install command already will have done that.
+ build([target], true, build._noLC, true, function (er) {
+ if (er) return cb(er)
+ resultPrinter(path.basename(me), me, target, cb)
+ })
+ })
+ })
+ })
+ })
+}
+
+function resultPrinter (pkg, src, dest, rp, cb) {
+ if (typeof cb !== "function") cb = rp, rp = null
+ var where = relativize(dest, path.resolve(process.cwd(),"x"))
+ rp = (rp || "").trim()
+ src = (src || "").trim()
+ if (npm.config.get("parseable")) {
+ return parseableOutput(dest, rp || src, cb)
+ }
+ if (rp === src) rp = null
+ output.write(where+" -> " + src
+ +(rp ? " -> " + rp: ""), cb)
+}
+
+function parseableOutput (dest, rp, cb) {
+ // link is always effectively "long", since it doesn't help much to
+ // *just* print the target folder.
+ // However, we don't actually ever read the version number, so
+ // the second field is always blank.
+ output.write(dest + "::" + rp, cb)
+}
diff --git a/deps/npm/lib/ls.js b/deps/npm/lib/ls.js
new file mode 100644
index 0000000000..33336cf63e
--- /dev/null
+++ b/deps/npm/lib/ls.js
@@ -0,0 +1,169 @@
+
+// show the installed versions of packages
+//
+// --parseable creates output like this:
+// <fullpath>:<name@ver>:<realpath>:<flags>
+// Flags are a :-separated list of zero or more indicators
+
+module.exports = exports = ls
+
+var npm = require("./npm.js")
+ , readInstalled = require("./utils/read-installed.js")
+ , output = require("./utils/output.js")
+ , log = require("./utils/log.js")
+ , relativize = require("./utils/relativize.js")
+ , path = require("path")
+
+ls.usage = "npm ls"
+
+function ls (args, silent, cb) {
+ if (typeof cb !== "function") cb = silent, silent = false
+
+ if (args.length) {
+ log.warn("ls doesn't take positional args. Try the 'search' command")
+ }
+
+ var dir = path.resolve(npm.dir, "..")
+
+ readInstalled(dir, function (er, data) {
+ if (er || silent) return cb(er, data)
+ var long = npm.config.get("long")
+ var out = makePretty(bfsify(data), long, dir).join("\n")
+ output.write(out, function (er) { cb(er, data) })
+ })
+}
+
+function bfsify (root, current, queue, seen) {
+ // walk over the data, and turn it from this:
+ // +-- a
+ // | `-- b
+ // | `-- a (truncated)
+ // `--b (truncated)
+ // into this:
+ // +-- a
+ // `-- b
+ // which looks nicer
+ current = current || root
+ queue = queue || []
+ seen = seen || []
+ var deps = current.dependencies = current.dependencies || {}
+ Object.keys(deps).forEach(function (d) {
+ var dep = deps[d]
+ if (typeof dep !== "object") return
+ if (seen.indexOf(dep) !== -1) {
+ if (npm.config.get("parseable") || !npm.config.get("long")) {
+ delete deps[d]
+ return
+ } else {
+ dep = deps[d] = Object.create(dep)
+ dep.dependencies = {}
+ }
+ }
+ queue.push(dep)
+ seen.push(dep)
+ })
+ if (!queue.length) return root
+ return bfsify(root, queue.shift(), queue, seen)
+}
+
+
+function makePretty (data, long, dir, prefix, list) {
+ var top = !list
+ list = list || []
+ prefix = prefix || ""
+ list.push(format(data, long, prefix, dir))
+ var deps = data.dependencies || {}
+ , childPref = prefix.split("├─").join("│ ")
+ .split("└─").join(" ")
+ , depList = Object.keys(deps)
+ , depLast = depList.length - 1
+ , maxDepth = npm.config.get("depth")
+ Object.keys(deps).sort(function (a, b) {
+ return a > b ? 1 : -1
+ }).forEach(function (d, i) {
+ var depData = deps[d]
+ if (typeof depData === "string") {
+ if (data.depth < maxDepth) {
+ var p = data.link || data.path
+ log.warn("Unmet dependency in "+p, d+" "+deps[d])
+ depData = npm.config.get("parseable")
+ ? ( npm.config.get("long")
+ ? path.resolve(data.path, "node_modules", d)
+ + ":"+d+"@"+JSON.stringify(depData)+":INVALID:MISSING"
+ : "" )
+ : "─ \033[31;40mUNMET DEPENDENCY\033[0m "+d+" "+depData
+ } else {
+ if (npm.config.get("parseable")) {
+ depData = path.resolve(data.path, "node_modules", d)
+ + (npm.config.get("long")
+ ? ":" + d + "@" + JSON.stringify(depData)
+ + ":" // no realpath resolved
+ + ":MAXDEPTH"
+ : "")
+ } else {
+ depData = "─ "+d+"@'"+depData +"' (max depth reached)"
+ }
+ }
+ }
+ var c = i === depLast ? "└─" : "├─"
+ makePretty(depData, long, dir, childPref + c, list)
+ })
+ if (top && list.length === 1 && !data._id) {
+ if (!npm.config.get("parseable")) {
+ list.push("(empty)")
+ } else if (npm.config.get("long")) list[0] += ":EMPTY"
+ }
+ return list.filter(function (l) { return l && l.trim() })
+}
+
+function ugly (data) {
+ if (typeof data === "string") {
+ return data
+ }
+ if (!npm.config.get("long")) return data.path
+
+ return data.path
+ + ":" + (data._id || "")
+ + ":" + (data.realPath !== data.path ? data.realPath : "")
+ + (data.extraneous ? ":EXTRANEOUS" : "")
+ + (data.invalid ? ":INVALID" : "")
+}
+
+function format (data, long, prefix, dir) {
+ if (npm.config.get("parseable")) return ugly(data)
+ if (typeof data === "string") {
+ return prefix + data
+ }
+// console.log([data.path, dir], "relativize")
+ var depLen = Object.keys(data.dependencies).length
+ , space = prefix.split("├─").join("│ ")
+ .split("└─").join(" ")
+ + (depLen ? "" : " ")
+ , rel = relativize(data.path || "", dir)
+ , l = prefix
+ + (rel === "." ? "" : depLen ? "┬ " : "─ ")
+ + (data._id ? data._id + " " : "")
+ + (data.link ? "-> " + data.link : "") + ""
+ + (rel === "." && !(long && data._id) ? dir : "")
+ if (data.invalid) {
+ if (data.realName !== data.name) l += " ("+data.realName+")"
+ l += " \033[31;40minvalid\033[0m"
+ }
+ if (data.extraneous && rel !== ".") {
+ l += " \033[32;40mextraneous\033[0m"
+ }
+ if (!long || !data._id) return l
+ var extras = []
+ if (rel !== ".") extras.push(rel)
+ else extras.push(dir)
+ if (data.description) extras.push(data.description)
+ if (data.repository) extras.push(data.repository.url)
+ if (data.homepage) extras.push(data.homepage)
+ extras = extras.filter(function (e) { return e })
+ var lastExtra = !depLen && extras.length - 1
+ l += extras.map(function (e, i) {
+ var indent = !depLen ? " " : "│ "
+ return "\n" + space + indent + e
+ }).join("")
+ return l
+}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
new file mode 100644
index 0000000000..51f7d64b88
--- /dev/null
+++ b/deps/npm/lib/npm.js
@@ -0,0 +1,448 @@
+;(function(){
+// windows: running "npm blah" in this folder will invoke WSH, not node.
+if (typeof WScript !== "undefined") {
+ WScript.echo("npm does not work when run\n"
+ +"with the Windows Scripting Host\n\n"
+ +"'cd' to a different directory,\n"
+ +"or type 'npm.cmd <args>',\n"
+ +"or type 'node npm <args>'.")
+ WScript.quit(1)
+ return
+}
+
+
+process.title = "npm"
+
+
+// FIXME there really ought to be a path.split in node core
+require("path").SPLIT_CHAR = process.platform === "win32" ? "\\" : "/"
+
+var EventEmitter = require("events").EventEmitter
+ , npm = module.exports = new EventEmitter
+ , config = require("./config.js")
+ , set = require("./utils/set.js")
+ , get = require("./utils/get.js")
+ , ini = require("./utils/ini.js")
+ , log = require("./utils/log.js")
+ , fs = require("graceful-fs")
+ , path = require("path")
+ , abbrev = require("abbrev")
+ , which = require("which")
+ , semver = require("semver")
+ , findPrefix = require("./utils/find-prefix.js")
+ , getUid = require("./utils/uid-number.js")
+ , mkdir = require("./utils/mkdir-p.js")
+
+npm.commands = {}
+npm.ELIFECYCLE = {}
+npm.E404 = {}
+npm.EPUBLISHCONFLICT = {}
+npm.EJSONPARSE = {}
+npm.EISGIT = {}
+npm.ECYCLE = {}
+npm.ENOTSUP = {}
+
+// HACK for windows
+if (process.platform === "win32") {
+ // stub in unavailable methods from process and fs binding
+ if (!process.getuid) process.getuid = function() {}
+ if (!process.getgid) process.getgid = function() {}
+ var fsBinding = process.binding("fs")
+ if (!fsBinding.chown) fsBinding.chown = function() {
+ var cb = arguments[arguments.length - 1]
+ if (typeof cb == "function") cb()
+ }
+
+ // patch rename/renameSync, but this should really be fixed in node
+ var _fsRename = fs.rename
+ , _fsPathPatch
+ _fsPathPatch = function(p) {
+ return p && p.replace(/\\/g, "/") || p;
+ }
+ fs.rename = function(p1, p2) {
+ arguments[0] = _fsPathPatch(p1)
+ arguments[1] = _fsPathPatch(p2)
+ return _fsRename.apply(fs, arguments);
+ }
+}
+
+try {
+ // startup, ok to do this synchronously
+ var j = JSON.parse(fs.readFileSync(
+ path.join(__dirname, "../package.json"))+"")
+ npm.version = j.version
+ npm.nodeVersionRequired = j.engines.node
+ if (!semver.satisfies(process.version, j.engines.node)) {
+ log.error([""
+ ,"npm requires node version: "+j.engines.node
+ ,"And you have: "+process.version
+ ,"which is not satisfactory."
+ ,""
+ ,"Bad things will likely happen. You have been warned."
+ ,""].join("\n"), "unsupported version")
+ }
+} catch (ex) {
+ try {
+ log(ex, "error reading version")
+ } catch (er) {}
+ npm.version = ex
+}
+
+var commandCache = {}
+ // short names for common things
+ , aliases = { "rm" : "uninstall"
+ , "r" : "uninstall"
+ , "un" : "uninstall"
+ , "unlink" : "uninstall"
+ , "remove" : "uninstall"
+ , "rb" : "rebuild"
+ , "list" : "ls"
+ , "la" : "ls"
+ , "ll" : "ls"
+ , "ln" : "link"
+ , "i" : "install"
+ , "up" : "update"
+ , "c" : "config"
+ , "info" : "view"
+ , "show" : "view"
+ , "find" : "search"
+ , "s" : "search"
+ , "se" : "search"
+ , "author" : "owner"
+ , "home" : "docs"
+ , "unstar": "star" // same function
+ , "apihelp" : "help"
+ }
+
+ , aliasNames = Object.keys(aliases)
+ // these are filenames in .
+ , cmdList = [ "install"
+ , "uninstall"
+ , "cache"
+ , "config"
+ , "set"
+ , "get"
+ , "update"
+ , "outdated"
+ , "prune"
+ , "submodule"
+ , "pack"
+
+ , "rebuild"
+ , "link"
+
+ , "publish"
+ , "star"
+ , "tag"
+ , "adduser"
+ , "unpublish"
+ , "owner"
+ , "deprecate"
+
+ , "help"
+ , "help-search"
+ , "ls"
+ , "search"
+ , "view"
+ , "init"
+ , "version"
+ , "edit"
+ , "explore"
+ , "docs"
+ , "bugs"
+ , "faq"
+ , "root"
+ , "prefix"
+ , "bin"
+ , "whoami"
+
+ , "test"
+ , "stop"
+ , "start"
+ , "restart"
+ , "run-script"
+ , "completion"
+ ]
+ , plumbing = [ "build"
+ , "unbuild"
+ , "xmas"
+ ]
+ , fullList = npm.fullList = cmdList.concat(aliasNames).filter(function (c) {
+ return plumbing.indexOf(c) === -1
+ })
+ , abbrevs = abbrev(fullList)
+
+Object.keys(abbrevs).concat(plumbing).forEach(function addCommand (c) {
+ Object.defineProperty(npm.commands, c, { get : function () {
+ if (!loaded) throw new Error(
+ "Call npm.load(conf, cb) before using this command.\n"+
+ "See the README.md or cli.js for example usage.")
+ var a = npm.deref(c)
+ if (c === "la" || c === "ll") {
+ npm.config.set("long", true)
+ }
+ npm.command = c
+ if (commandCache[a]) return commandCache[a]
+ var cmd = require(__dirname+"/"+a+".js")
+ commandCache[a] = function () {
+ var args = Array.prototype.slice.call(arguments, 0)
+ if (typeof args[args.length - 1] !== "function") {
+ args.push(defaultCb)
+ }
+ if (args.length === 1) args.unshift([])
+ cmd.apply(npm, args)
+ }
+ Object.keys(cmd).forEach(function (k) {
+ commandCache[a][k] = cmd[k]
+ })
+ return commandCache[a]
+ }, enumerable: fullList.indexOf(c) !== -1 })
+
+ // make css-case commands callable via camelCase as well
+ if (c.match(/\-([a-z])/)) {
+ addCommand(c.replace(/\-([a-z])/g, function (a, b) {
+ return b.toUpperCase()
+ }))
+ }
+})
+
+function defaultCb (er, data) {
+ if (er) console.error(er.stack || er.message)
+ else console.log(data)
+}
+
+npm.deref = function (c) {
+ if (!c) return ""
+ if (c.match(/[A-Z]/)) c = c.replace(/([A-Z])/g, function (m) {
+ return "-" + m.toLowerCase()
+ })
+ if (plumbing.indexOf(c) !== -1) return c
+ var a = abbrevs[c]
+ if (aliases[a]) a = aliases[a]
+ return a
+}
+
+var loaded = false
+ , loading = false
+ , loadErr = null
+ , loadListeners = []
+
+function loadCb (er) {
+ loadListeners.forEach(function (cb) {
+ process.nextTick(cb.bind(npm, er, npm))
+ })
+ loadListeners.length = 0
+}
+
+
+npm.load = function (conf, cb_) {
+ if (!cb_ && typeof conf === "function") cb_ = conf , conf = {}
+ if (!cb_) cb_ = function () {}
+ if (!conf) conf = {}
+ loadListeners.push(cb_)
+ if (loaded || loadErr) return cb(loadErr)
+ if (loading) return
+ loading = true
+ var onload = true
+
+ function cb (er) {
+ if (loadErr) return
+ loaded = true
+ loadCb(loadErr = er)
+ if (onload = onload && npm.config.get("onload-script")) {
+ require(onload)
+ onload = false
+ }
+ }
+
+ log.waitForConfig()
+
+ load(npm, conf, cb)
+}
+
+
+function load (npm, conf, cb) {
+ which(process.argv[0], function (er, node) {
+ //console.error("back from which")
+ if (!er && node.toUpperCase() !== process.execPath.toUpperCase()) {
+ log.verbose("node symlink", node)
+ process.execPath = node
+ process.installPrefix = path.resolve(node, "..", "..")
+ }
+
+ // look up configs
+ //console.error("about to look up configs")
+
+ ini.resolveConfigs(conf, function (er) {
+ //console.error("back from config lookup", er && er.stack)
+ if (er) return cb(er)
+
+ var n = 2
+ , errState
+
+ var umask = parseInt(conf.umask, 8)
+ npm.modes = { exec: 0777 & (~umask)
+ , file: 0666 & (~umask)
+ , umask: umask }
+
+ loadPrefix(npm, conf, next)
+ loadUid(npm, conf, next)
+
+ function next (er) {
+ //console.error("next", er && er.stack)
+ if (errState) return
+ if (er) return cb(errState = er)
+ if (-- n <= 0) return cb()
+ }
+ })
+ })
+}
+
+function loadPrefix (npm, conf, cb) {
+ // try to guess at a good node_modules location.
+ var p
+ , gp
+ if (!conf.hasOwnProperty("prefix")) {
+ p = process.cwd()
+ } else {
+ p = npm.config.get("prefix")
+ }
+ gp = npm.config.get("prefix")
+
+ findPrefix(p, function (er, p) {
+ //console.log("Back from findPrefix", er && er.stack, p)
+ Object.defineProperty(npm, "localPrefix",
+ { get : function () { return p }
+ , set : function (r) { return p = r }
+ , enumerable : true
+ })
+ // the prefix MUST exist, or else nothing works.
+ mkdir(p, npm.modes.exec, null, null, true, next)
+ })
+
+ findPrefix(gp, function (er, gp) {
+ Object.defineProperty(npm, "globalPrefix",
+ { get : function () { return gp }
+ , set : function (r) { return gp = r }
+ , enumerable : true
+ })
+ // the prefix MUST exist, or else nothing works.
+ mkdir(gp, npm.modes.exec, null, null, true, next)
+ })
+
+ var i = 2
+ , errState = null
+ function next (er) {
+ if (errState) return
+ if (er) return cb(errState = er)
+ if (--i === 0) return cb()
+ }
+}
+
+
+function loadUid (npm, conf, cb) {
+ // if we're not in unsafe-perm mode, then figure out who
+ // to run stuff as. Do this first, to support `npm update npm -g`
+ if (!npm.config.get("unsafe-perm")) {
+ getUid(npm.config.get("user"), npm.config.get("group"), cb)
+ } else {
+ //console.error("skipping loadUid")
+ process.nextTick(cb)
+ }
+}
+
+
+npm.config =
+ { get : function (key) { return ini.get(key) }
+ , set : function (key, val) { return ini.set(key, val, "cli") }
+ , del : function (key, val) { return ini.del(key, val, "cli") }
+ }
+
+Object.defineProperty(npm, "prefix",
+ { get : function () {
+ return npm.config.get("global") ? npm.globalPrefix : npm.localPrefix
+ }
+ , set : function (r) {
+ var k = npm.config.get("global") ? "globalPrefix" : "localPrefix"
+ return npm[k] = r
+ }
+ , enumerable : true
+ })
+
+Object.defineProperty(npm, "bin",
+ { get : function () {
+ if (npm.config.get("global")) return npm.globalBin
+ return path.resolve(npm.root, ".bin")
+ }
+ , enumerable : true
+ })
+
+Object.defineProperty(npm, "globalBin",
+ { get : function () {
+ var b = npm.globalPrefix
+ if (process.platform !== "win32") b = path.resolve(b, "bin")
+ return b
+ }
+ })
+
+Object.defineProperty(npm, "dir",
+ { get : function () {
+ if (npm.config.get("global")) return npm.globalDir
+ return path.resolve(npm.prefix, "node_modules")
+ }
+ , enumerable : true
+ })
+
+Object.defineProperty(npm, "globalDir",
+ { get : function () {
+ return (process.platform !== "win32")
+ ? path.resolve(npm.globalPrefix, "lib", "node_modules")
+ : path.resolve(npm.globalPrefix, "node_modules")
+ }
+ , enumerable : true
+ })
+
+Object.defineProperty(npm, "root",
+ { get : function () { return npm.dir } })
+
+Object.defineProperty(npm, "cache",
+ { get : function () { return npm.config.get("cache") }
+ , set : function (r) { return npm.config.set("cache", r) }
+ , enumerable : true
+ })
+
+var tmpFolder
+Object.defineProperty(npm, "tmp",
+ { get : function () {
+ if (!tmpFolder) tmpFolder = "npm-"+Date.now()
+ return path.resolve(npm.config.get("tmp"), tmpFolder)
+ }
+ , enumerable : true
+ })
+
+// the better to repl you with
+Object.getOwnPropertyNames(npm.commands).forEach(function (n) {
+ if (npm.hasOwnProperty(n)) return
+
+ Object.defineProperty(npm, n, { get: function () {
+ return function () {
+ var args = Array.prototype.slice.call(arguments, 0)
+ , cb = defaultCb
+
+ if (args.length === 1 && Array.isArray(args[0])) {
+ args = args[0]
+ }
+
+ if (typeof args[args.length - 1] === "function") {
+ cb = args.pop()
+ }
+
+ npm.commands[n](args, cb)
+ }
+ }, enumerable: false, configurable: true })
+})
+
+if (require.main === module) {
+ require("../bin/npm-cli.js")
+}
+})()
diff --git a/deps/npm/lib/outdated.js b/deps/npm/lib/outdated.js
new file mode 100644
index 0000000000..496dfbd426
--- /dev/null
+++ b/deps/npm/lib/outdated.js
@@ -0,0 +1,146 @@
+/*
+
+npm outdated [pkg]
+
+Does the following:
+
+1. check for a new version of pkg
+
+If no packages are specified, then run for all installed
+packages.
+
+*/
+
+module.exports = outdated
+
+outdated.usage = "npm outdated [<pkg> [<pkg> ...]]"
+
+outdated.completion = require("./utils/completion/installed-deep.js")
+
+
+var path = require("path")
+ , fs = require("graceful-fs")
+ , readJson = require("./utils/read-json.js")
+ , cache = require("./cache.js")
+ , asyncMap = require("slide").asyncMap
+ , npm = require("./npm.js")
+ , log = require("./utils/log.js")
+ , semver = require("semver")
+ , relativize = require("./utils/relativize.js")
+
+function outdated (args, silent, cb) {
+ if (typeof cb !== "function") cb = silent, silent = false
+ var dir = path.resolve(npm.dir, "..")
+ outdated_(args, dir, {}, function (er, list) {
+ function cb_ (er) { cb(er, list) }
+
+ if (er || silent) return cb_(er)
+ var outList = list.map(makePretty)
+ require("./utils/output.js").write(outList.join("\n"), cb_)
+ })
+}
+
+// [[ dir, dep, has, want ]]
+function makePretty (p) {
+ var parseable = npm.config.get("parseable")
+ , long = npm.config.get("long")
+ , dep = p[1]
+ , dir = path.resolve(p[0], "node_modules", dep)
+ , has = p[2]
+ , want = p[3]
+ if (parseable) {
+ var str = dir
+ if (npm.config.get("long")) {
+ str += ":" + dep + "@" + want
+ + ":" + (has ? (dep + "@" + has) : "MISSING")
+ }
+ return str
+ }
+
+ if (!npm.config.get("global")) {
+ dir = relativize(dir, process.cwd()+"/x")
+ }
+ return dep + "@" + want + " " + dir
+ + " current=" + (has || "MISSING")
+}
+
+function outdated_ (args, dir, parentHas, cb) {
+ // get the deps from package.json, or {<dir/node_modules/*>:"*"}
+ // asyncMap over deps:
+ // shouldHave = cache.add(dep, req).version
+ // if has === shouldHave then
+ // return outdated(args, dir/node_modules/dep, parentHas + has)
+ // else if dep in args or args is empty
+ // return [dir, dep, has, shouldHave]
+
+ var deps = null
+ readJson(path.resolve(dir, "package.json"), function (er, d) {
+ deps = (er) ? true : d.dependencies
+ return next()
+ })
+
+ var has = null
+ fs.readdir(path.resolve(dir, "node_modules"), function (er, pkgs) {
+ if (er) {
+ has = Object.create(parentHas)
+ return next()
+ }
+ asyncMap(pkgs, function (pkg, cb) {
+ readJson( path.resolve(dir, "node_modules", pkg, "package.json")
+ , function (er, d) {
+ cb(null, er ? [] : [[d.name, d.version]])
+ })
+ }, function (er, pvs) {
+ if (er) return cb(er)
+ has = Object.create(parentHas)
+ pvs.forEach(function (pv) {
+ has[pv[0]] = pv[1]
+ })
+ next()
+ })
+ })
+
+ function next () {
+ if (!has || !deps) return
+ if (deps === true) {
+ deps = Object.keys(has).reduce(function (l, r) {
+ l[r] = "*"
+ return l
+ }, {})
+ }
+ // now get what we should have, based on the dep.
+ // if has[dep] !== shouldHave[dep], then cb with the data
+ // otherwise dive into the folder
+ asyncMap(Object.keys(deps), function (dep, cb) {
+ shouldUpdate(args, dir, dep, has, deps[dep], cb)
+ }, cb)
+ }
+}
+
+function shouldUpdate (args, dir, dep, has, req, cb) {
+ // look up the most recent version.
+ // if that's what we already have, or if it's not on the args list,
+ // then dive into it. Otherwise, cb() with the data.
+
+ function skip () {
+ outdated_( args
+ , path.resolve(dir, "node_modules", dep)
+ , has
+ , cb )
+ }
+
+ function doIt (shouldHave) {
+ cb(null, [[ dir, dep, has[dep], shouldHave ]])
+ }
+
+ if (args.length && args.indexOf(dep) === -1) {
+ return skip()
+ }
+
+ // so, we can conceivably update this. find out if we need to.
+ cache.add(dep, req, function (er, d) {
+ // if this fails, then it means we can't update this thing.
+ // it's probably a thing that isn't published.
+ return (er || d.version === has[dep]) ? skip() : doIt(d.version)
+ })
+}
diff --git a/deps/npm/lib/owner.js b/deps/npm/lib/owner.js
new file mode 100644
index 0000000000..6025e58a75
--- /dev/null
+++ b/deps/npm/lib/owner.js
@@ -0,0 +1,188 @@
+
+module.exports = owner
+
+owner.usage = "npm owner add <username> <pkg>"
+ + "\nnpm owner rm <username> <pkg>"
+ + "\nnpm owner ls <pkg>"
+
+owner.completion = function (opts, cb) {
+ var argv = opts.conf.argv.remain
+ if (argv.length > 4) return cb()
+ if (argv.length <= 2) {
+ var subs = ["add", "rm"]
+ if (opts.partialWord === "l") subs.push("ls")
+ else subs.push("ls", "list")
+ return cb(null, subs)
+ }
+ var un = encodeURIComponent(npm.config.get("username"))
+ switch (argv[2]) {
+ case "ls":
+ if (argv.length > 3) return cb()
+ else return registry.get("/-/short", cb)
+
+ case "rm":
+ if (argv.length > 3) {
+ var theUser = encodeURIComponent(argv[3])
+ , uri = "/-/by-user/"+theUser+"|"+un
+ console.error(uri)
+ return registry.get(uri, function (er, d) {
+ if (er) return cb(er)
+ // return the intersection
+ return cb(null, d[theUser].filter(function (p) {
+ // kludge for server adminery.
+ return un === "isaacs" || d[un].indexOf(p) === -1
+ }))
+ })
+ }
+ // else fallthrough
+ case "add":
+ if (argv.length > 3) {
+ var theUser = encodeURIComponent(argv[3])
+ , uri = "/-/by-user/"+theUser+"|"+un
+ console.error(uri)
+ return registry.get(uri, function (er, d) {
+ console.error(uri, er || d)
+ // return mine that they're not already on.
+ if (er) return cb(er)
+ var mine = d[un] || []
+ , theirs = d[theUser] || []
+ return cb(null, mine.filter(function (p) {
+ return theirs.indexOf(p) === -1
+ }))
+ })
+ }
+ // just list all users who aren't me.
+ return registry.get("/-/users", function (er, list) {
+ if (er) return cb()
+ return cb(null, Object.keys(list).filter(function (n) {
+ return n !== un
+ }))
+ })
+
+ default:
+ return cb()
+ }
+}
+
+var registry = require("./utils/npm-registry-client/index.js")
+ , get = registry.request.GET
+ , put = registry.request.PUT
+ , log = require("./utils/log.js")
+ , output
+ , npm = require("./npm.js")
+
+function owner (args, cb) {
+ var action = args.shift()
+ switch (action) {
+ case "ls": case "list": return ls(args[0], cb)
+ case "add": return add(args[0], args[1], cb)
+ case "rm": case "remove": return rm(args[0], args[1], cb)
+ default: return unknown(action, cb)
+ }
+}
+
+function ls (pkg, cb) {
+ if (!pkg) return cb(owner.usage)
+ get(pkg, function (er, data) {
+ var msg = ""
+ if (er) return log.er(cb, "Couldn't get owner data for "+pkg)(er)
+ var owners = data.maintainers
+ if (!owners || !owners.length) msg = "admin party!"
+ else msg = owners.map(function (o) { return o.name +" <"+o.email+">" }).join("\n")
+ output = output || require("./utils/output.js")
+ output.write(msg, function (er) { cb(er, owners) })
+ })
+}
+
+function add (user, pkg, cb) {
+ if (!pkg) return readLocalPkg(function (er, pkg) {
+ if (er) return cb(er)
+ if (!pkg) return cb(new Error(owner.usage))
+ add(user, pkg, cb)
+ })
+
+ log.verbose(user+" to "+pkg, "owner add")
+ mutate(pkg, user, function (u, owners) {
+ if (!owners) owners = []
+ for (var i = 0, l = owners.length; i < l; i ++) {
+ var o = owners[i]
+ if (o.name === u.name) {
+ log( "Already a package owner: "+o.name+" <"+o.email+">"
+ , "owner add"
+ )
+ return false
+ }
+ }
+ owners.push(u)
+ return owners
+ }, cb)
+}
+
+function rm (user, pkg, cb) {
+ if (!pkg) return readLocalPkg(function (er, pkg) {
+ if (er) return cb(er)
+ if (!pkg) return cb(new Error(owner.usage))
+ rm(user, pkg, cb)
+ })
+
+ log.verbose(user+" from "+pkg, "owner rm")
+ mutate(pkg, null, function (u, owners) {
+ var found = false
+ , m = owners.filter(function (o) {
+ var match = (o.name === user)
+ found = found || match
+ return !match
+ })
+ if (!found) {
+ log("Not a package owner: "+user, "owner rm")
+ return false
+ }
+ if (!m.length) return new Error(
+ "Cannot remove all owners of a package. Add someone else first.")
+ return m
+ }, cb)
+}
+
+function mutate (pkg, user, mutation, cb) {
+ if (user) {
+ get("/-/user/org.couchdb.user:"+user, mutate_)
+ } else {
+ mutate_(null, null)
+ }
+
+ function mutate_ (er, u) {
+ if (er) return log.er(cb, "Error getting user data for "+user)(er)
+ if (user && (!u || u.error)) return cb(new Error(
+ "Couldn't get user data for "+user+": "+JSON.stringify(u)))
+ if (u) u = { "name" : u.name, "email" : u.email }
+ get("/"+pkg, function (er, data) {
+ if (er) return log.er(cb, "Couldn't get package data for "+pkg)(er)
+ var m = mutation(u, data.maintainers)
+ if (!m) return cb() // handled
+ if (m instanceof Error) return cb(m) // error
+ data = { _id : data._id
+ , _rev : data._rev
+ , maintainers : m
+ }
+ put("/"+pkg+"/-rev/"+data._rev, data, function (er, data) {
+ if (er) return log.er(cb, "Failed to update package metadata")(er)
+ if (data.error) return cb(new Error(
+ "Failed to update package metadata: "+JSON.stringify(data)))
+ cb(null, data)
+ })
+ })
+ }
+}
+
+function readLocalPkg (cb) {
+ if (npm.config.get("global")) return cb()
+ var path = require("path")
+ , readJson = require("./utils/read-json.js")
+ readJson(path.resolve(npm.prefix, "package.json"), function (er, d) {
+ return cb(er, d && d.name)
+ })
+}
+
+function unknown (action, cb) {
+ cb("Usage: \n"+owner.usage)
+}
diff --git a/deps/npm/lib/pack.js b/deps/npm/lib/pack.js
new file mode 100644
index 0000000000..3475c4a3ef
--- /dev/null
+++ b/deps/npm/lib/pack.js
@@ -0,0 +1,66 @@
+// npm pack <pkg>
+// Packs the specified package into a .tgz file, which can then
+// be installed.
+
+module.exports = pack
+
+var npm = require("./npm.js")
+ , install = require("./install.js")
+ , cache = require("./cache.js")
+ , output = require("./utils/output.js")
+ , fs = require("graceful-fs")
+ , chain = require("slide").chain
+ , path = require("path")
+ , relativize = require("./utils/relativize.js")
+ , cwd = process.cwd()
+
+pack.usage = "npm pack <pkg>"
+
+// if it can be installed, it can be packed.
+pack.completion = install.completion
+
+function pack (args, silent, cb) {
+ if (typeof cb !== "function") cb = silent, silent = false
+
+ if (args.length === 0) args = ["."]
+
+ chain(args.map(function (arg) { return function (cb) {
+ pack_(arg, cb)
+ }}), function (er, files) {
+ if (er || silent) return cb(er, files)
+ printFiles(files, cb)
+ })
+}
+
+function printFiles (files, cb) {
+ files = files.map(function (file) {
+ return relativize(file, cwd)
+ })
+ output.write(files.join("\n"), cb)
+}
+
+// add to cache, then cp to the cwd
+function pack_ (pkg, cb) {
+ cache.add(pkg, function (er, data) {
+ if (er) return cb(er)
+ var fname = path.resolve(data._id.replace(/@/g, "-") + ".tgz")
+ , cached = path.resolve( npm.cache
+ , data.name
+ , data.version
+ , "package.tgz" )
+ , from = fs.createReadStream(cached)
+ , to = fs.createWriteStream(fname)
+ , errState = null
+
+ from.on("error", cb_)
+ to.on("error", cb_)
+ to.on("close", cb_)
+ from.pipe(to)
+
+ function cb_ (er) {
+ if (errState) return
+ if (er) return cb(errState = er)
+ cb(null, fname)
+ }
+ })
+}
diff --git a/deps/npm/lib/prefix.js b/deps/npm/lib/prefix.js
new file mode 100644
index 0000000000..f37aacf2c5
--- /dev/null
+++ b/deps/npm/lib/prefix.js
@@ -0,0 +1,10 @@
+module.exports = prefix
+
+var npm = require("./npm.js")
+ , output = require("./utils/output.js")
+
+prefix.usage = "npm prefix\nnpm prefix -g\n(just prints the prefix folder)"
+
+function prefix (args, cb) {
+ output.write(npm.prefix, function (er) { cb(er, npm.prefix) })
+}
diff --git a/deps/npm/lib/prune.js b/deps/npm/lib/prune.js
new file mode 100644
index 0000000000..4a02dc1f8a
--- /dev/null
+++ b/deps/npm/lib/prune.js
@@ -0,0 +1,40 @@
+// prune extraneous packages.
+
+module.exports = prune
+
+prune.usage = "npm prune"
+
+var readInstalled = require("./utils/read-installed.js")
+ , npm = require("./npm.js")
+
+prune.completion = require("./utils/completion/installed-deep.js")
+
+function prune (args, cb) {
+ readInstalled(npm.prefix, function (er, data) {
+ if (er) return cb(er)
+ prune_(args, data, cb)
+ })
+}
+
+function prune_ (args, data, cb) {
+ npm.commands.unbuild(prunables(args, data, []), cb)
+}
+
+function prunables (args, data, seen) {
+ var deps = data.dependencies || {}
+ return Object.keys(deps).map(function (d) {
+ if (typeof deps[d] !== "object"
+ || seen.indexOf(deps[d]) !== -1) return null
+ seen.push(deps[d])
+ if (deps[d].extraneous
+ && (args.length === 0 || args.indexOf(d) !== -1)) {
+ var extra = deps[d]
+ delete deps[d]
+ return extra.path
+ }
+ return prunables(args, deps[d], seen)
+ }).filter(function (d) { return d !== null })
+ .reduce(function FLAT (l, r) {
+ return l.concat(Array.isArray(r) ? r.reduce(FLAT,[]) : r)
+ }, [])
+}
diff --git a/deps/npm/lib/publish.js b/deps/npm/lib/publish.js
new file mode 100644
index 0000000000..a5855ffdee
--- /dev/null
+++ b/deps/npm/lib/publish.js
@@ -0,0 +1,166 @@
+
+module.exports = publish
+
+var npm = require("./npm.js")
+ , registry = require("./utils/npm-registry-client/index.js")
+ , log = require("./utils/log.js")
+ , tar = require("./utils/tar.js")
+ , sha = require("./utils/sha.js")
+ , path = require("path")
+ , readJson = require("./utils/read-json.js")
+ , fs = require("graceful-fs")
+ , lifecycle = require("./utils/lifecycle.js")
+ , chain = require("slide").chain
+ , output = require("./utils/output.js")
+
+publish.usage = "npm publish <tarball>"
+ + "\nnpm publish <folder>"
+ + "\n\nPublishes '.' if no argument supplied"
+
+publish.completion = function (opts, cb) {
+ // publish can complete to a folder with a package.json
+ // or a tarball, or a tarball url.
+ // for now, not yet implemented.
+ return cb()
+}
+
+function publish (args, isRetry, cb) {
+ if (typeof cb !== "function") cb = isRetry, isRetry = false
+ if (args.length === 0) args = ["."]
+ if (args.length !== 1) return cb(publish.usage)
+
+ log.verbose(args, "publish")
+ var arg = args[0]
+ // if it's a local folder, then run the prepublish there, first.
+ readJson(path.resolve(arg, "package.json"), function (er, data) {
+ // error is ok. could be publishing a url or tarball
+ if (er) return cacheAddPublish(arg, false, isRetry, cb)
+ lifecycle(data, "prepublish", arg, function (er) {
+ if (er) return cb(er)
+ cacheAddPublish(arg, true, isRetry, cb)
+ })
+ })
+}
+
+function cacheAddPublish (arg, didPre, isRetry, cb) {
+ npm.commands.cache.add(arg, function (er, data) {
+ if (er) return cb(er)
+ log.silly(data, "publish")
+ var cachedir = path.resolve( npm.cache
+ , data.name
+ , data.version
+ , "package" )
+ chain
+ ( [ !didPre && [lifecycle, data, "prepublish", cachedir]
+ , [publish_, arg, data, isRetry, cachedir]
+ , [lifecycle, data, "publish", cachedir]
+ , [lifecycle, data, "postpublish", cachedir] ]
+ , cb )
+ })
+}
+
+function publish_ (arg, data, isRetry, cachedir, cb) {
+ if (!data) return cb(new Error("no package.json file found"))
+
+ // check for publishConfig hash
+ if (data.publishConfig) {
+ Object.keys(data.publishConfig).forEach(function (k) {
+ log.info(k + "=" + data.publishConfig[k], "publishConfig")
+ npm.config.set(k, data.publishConfig[k])
+ })
+ }
+
+ delete data.modules
+ if (data.private) return cb(new Error
+ ("This package has been marked as private\n"
+ +"Remove the 'private' field from the package.json to publish it."))
+
+ // pre-build
+ var bd = data.scripts
+ && ( data.scripts.preinstall
+ || data.scripts.install
+ || data.scripts.postinstall )
+ && npm.config.get("bindist")
+ && npm.config.get("bin-publish")
+ preBuild(data, bd, function (er, tb) {
+ if (er) return cb(er)
+ return regPublish(data, tb, isRetry, arg, cachedir, cb)
+ })
+}
+
+
+function preBuild (data, bd, cb) {
+ if (!bd) return cb()
+ // unpack to cache/n/v/build
+ // build there
+ // pack to cache/package-<bd>.tgz
+ var cf = path.resolve(npm.cache, data.name, data.version)
+ var pb = path.resolve(cf, "build")
+ , buildTarget = path.resolve(pb, "node_modules", data.name)
+ , tb = path.resolve(cf, "package-"+bd+".tgz")
+ , sourceBall = path.resolve(cf, "package.tgz")
+
+ log.verbose("about to cache unpack")
+ log.verbose(sourceBall, "the tarball")
+ npm.commands.install(pb, sourceBall, function (er) {
+ log.info(data._id, "prebuild done")
+ // build failure just means that we can't prebuild
+ if (er) {
+ log.warn(er.message, "prebuild failed "+bd)
+ return cb()
+ }
+ // now strip the preinstall/install scripts
+ // they've already been run.
+ var pbj = path.resolve(buildTarget, "package.json")
+ readJson(pbj, function (er, pbo) {
+ if (er) return cb(er)
+ if (pbo.scripts) {
+ delete pbo.scripts.preinstall
+ delete pbo.scripts.install
+ delete pbo.scripts.postinstall
+ }
+ pbo.prebuilt = bd
+ pbo.files = pbo.files || []
+ pbo.files.push("build")
+ pbo.files.push("build/")
+ pbo.files.push("*.node")
+ pbo.files.push("*.js")
+ fs.writeFile(pbj, JSON.stringify(pbo, null, 2), function (er) {
+ if (er) return cb(er)
+ tar.pack(tb, buildTarget, pbo, true, function (er) {
+ if (er) return cb(er)
+ // try to validate the shasum, too
+ sha.get(tb, function (er, shasum) {
+ if (er) return cb(er)
+ // binary distribution requires shasum checking.
+ if (!shasum) return cb()
+ data.dist.bin = data.dist.bin || {}
+ data.dist.bin[bd] = data.dist.bin[bd] || {}
+ data.dist.bin[bd].shasum = shasum
+ return cb(null, tb)
+ })
+ })
+ })
+ })
+ })
+}
+
+function regPublish (data, prebuilt, isRetry, arg, cachedir, cb) {
+ // check to see if there's a README.md in there.
+ var readme = path.resolve(cachedir, "README.md")
+ fs.readFile(readme, function (er, readme) {
+ // ignore error. it's an optional feature
+ registry.publish(data, prebuilt, readme, function (er) {
+ if (er && er.errno === npm.EPUBLISHCONFLICT
+ && npm.config.get("force") && !isRetry) {
+ log.warn("Forced publish over "+data._id, "publish")
+ return npm.commands.unpublish([data._id], function (er) {
+ // ignore errors. Use the force. Reach out with your feelings.
+ publish([arg], true, cb)
+ })
+ }
+ if (er) return cb(er)
+ output.write("+ " + data._id, cb)
+ })
+ })
+}
diff --git a/deps/npm/lib/rebuild.js b/deps/npm/lib/rebuild.js
new file mode 100644
index 0000000000..f3f7596204
--- /dev/null
+++ b/deps/npm/lib/rebuild.js
@@ -0,0 +1,68 @@
+
+module.exports = rebuild
+
+var readInstalled = require("./utils/read-installed.js")
+ , semver = require("semver")
+ , log = require("./utils/log.js")
+ , path = require("path")
+ , npm = require("./npm.js")
+ , output = require("./utils/output.js")
+
+rebuild.usage = "npm rebuild [<name>[@<version>] [name[@<version>] ...]]"
+
+rebuild.completion = require("./utils/completion/installed-deep.js")
+
+function rebuild (args, cb) {
+ readInstalled(npm.prefix, function (er, data) {
+ log(typeof data, "read Installed")
+ if (er) return cb(er)
+ var set = filter(data, args)
+ , folders = Object.keys(set).filter(function (f) {
+ return f !== npm.prefix
+ })
+ if (!folders.length) return cb()
+ log.silly(folders, "rebuild set")
+ npm.commands.build(folders, function (er) {
+ if (er) return cb(er)
+ output.write(folders.map(function (f) {
+ return set[f] + " " + f
+ }).join("\n"), cb)
+ })
+ })
+}
+
+function filter (data, args, set, seen) {
+ if (!set) set = {}
+ if (!seen) seen = {}
+ if (set.hasOwnProperty(data.path)) return set
+ if (seen.hasOwnProperty(data.path)) return set
+ seen[data.path] = true
+ var pass
+ if (!args.length) pass = true // rebuild everything
+ else if (data.name && data._id) {
+ for (var i = 0, l = args.length; i < l; i ++) {
+ var arg = args[i]
+ , nv = arg.split("@")
+ , n = nv.shift()
+ , v = nv.join("@")
+ if (n !== data.name) continue
+ if (!semver.satisfies(data.version, v)) continue
+ pass = true
+ break
+ }
+ }
+ if (pass && data._id) {
+ log.verbose([data.path, data._id], "path id")
+ set[data.path] = data._id
+ }
+ // need to also dive through kids, always.
+ // since this isn't an install these won't get auto-built unless
+ // they're not dependencies.
+ Object.keys(data.dependencies || {}).forEach(function (d) {
+ // return
+ var dep = data.dependencies[d]
+ if (typeof dep === "string") return
+ filter(dep, args, set, seen)
+ })
+ return set
+}
diff --git a/deps/npm/lib/restart.js b/deps/npm/lib/restart.js
new file mode 100644
index 0000000000..69c4b913db
--- /dev/null
+++ b/deps/npm/lib/restart.js
@@ -0,0 +1 @@
+module.exports = require("./utils/lifecycle.js").cmd("restart")
diff --git a/deps/npm/lib/root.js b/deps/npm/lib/root.js
new file mode 100644
index 0000000000..6062ec2202
--- /dev/null
+++ b/deps/npm/lib/root.js
@@ -0,0 +1,11 @@
+module.exports = root
+
+var npm = require("./npm.js")
+ , output = require("./utils/output.js")
+ , log = require("./utils/log.js")
+
+root.usage = "npm root\nnpm root -g\n(just prints the root folder)"
+
+function root (args, cb) {
+ output.write(npm.dir, function (er) { cb(er, npm.dir) })
+}
diff --git a/deps/npm/lib/run-script.js b/deps/npm/lib/run-script.js
new file mode 100644
index 0000000000..4a4d2dc610
--- /dev/null
+++ b/deps/npm/lib/run-script.js
@@ -0,0 +1,100 @@
+
+module.exports = runScript
+
+var lifecycle = require("./utils/lifecycle.js")
+ , npm = require("./npm.js")
+ , path = require("path")
+ , readJson = require("./utils/read-json.js")
+ , log = require("./utils/log.js")
+ , chain = require("slide").chain
+ , fs = require("graceful-fs")
+ , asyncMap = require("slide").asyncMap
+
+runScript.usage = "npm run-script [<pkg>] <command>"
+
+runScript.completion = function (opts, cb) {
+
+ // see if there's already a package specified.
+ var argv = opts.conf.argv.remain
+ , installedShallow = require("./utils/completion/installed-shallow.js")
+
+ if (argv.length >= 4) return cb()
+
+ if (argv.length === 3) {
+ // either specified a script locally, in which case, done,
+ // or a package, in which case, complete against its scripts
+ var json = path.join(npm.prefix, "package.json")
+ return readJson(json, function (er, d) {
+ if (er) d = {}
+ var scripts = Object.keys(d.scripts || {})
+ console.error("local scripts", scripts)
+ if (scripts.indexOf(argv[2]) !== -1) return cb()
+ // ok, try to find out which package it was, then
+ var pref = npm.config.get("global") ? npm.config.get("prefix")
+ : npm.prefix
+ var pkgDir = path.resolve( pref, "node_modules"
+ , argv[2], "package.json" )
+ console.error("global?", npm.config.get("global"))
+ console.error(pkgDir, "package dir")
+ readJson(pkgDir, function (er, d) {
+ if (er) d = {}
+ var scripts = Object.keys(d.scripts || {})
+ return cb(null, scripts)
+ })
+ })
+ }
+
+ // complete against the installed-shallow, and the pwd's scripts.
+ // but only packages that have scripts
+ var installed
+ , scripts
+ installedShallow(opts, function (d) {
+ return d.scripts
+ }, function (er, inst) {
+ installed = inst
+ next()
+ })
+
+ if (npm.config.get("global")) scripts = [], next()
+ else readJson(path.join(npm.prefix, "package.json"), function (er, d) {
+ d = d || {}
+ scripts = Object.keys(d.scripts || {})
+ next()
+ })
+
+ function next () {
+ if (!installed || !scripts) return
+ return cb(null, scripts.concat(installed))
+ }
+}
+
+function runScript (args, cb) {
+ var pkgdir = args.length === 1 ? process.cwd()
+ : path.resolve(npm.dir, args[0])
+ , cmd = args.pop()
+
+ readJson(path.resolve(pkgdir, "package.json"), function (er, d) {
+ if (er) return cb(er)
+ run(d, pkgdir, cmd, cb)
+ })
+}
+
+function run (pkg, wd, cmd, cb) {
+ var cmds = []
+ if (!pkg.scripts) pkg.scripts = {}
+ if (cmd === "restart") {
+ cmds = ["prestop","stop","poststop"
+ ,"restart"
+ ,"prestart","start","poststart"]
+ } else {
+ cmds = [cmd]
+ }
+ if (!cmd.match(/^(pre|post)/)) {
+ cmds = ["pre"+cmd].concat(cmds).concat("post"+cmd)
+ }
+ log.verbose(cmds, "run-script")
+ chain(cmds.map(function (c) {
+ // when running scripts explicitly, assume that they're trusted.
+ return [lifecycle, pkg, c, wd, true]
+ }), cb)
+}
diff --git a/deps/npm/lib/search.js b/deps/npm/lib/search.js
new file mode 100644
index 0000000000..45e436a0c1
--- /dev/null
+++ b/deps/npm/lib/search.js
@@ -0,0 +1,222 @@
+
+module.exports = exports = search
+
+var npm = require("./npm.js")
+ , registry = require("./utils/npm-registry-client/index.js")
+ , semver = require("semver")
+ , output
+ , log = require("./utils/log.js")
+
+search.usage = "npm search [some search terms ...]"
+
+search.completion = function (opts, cb) {
+ var compl = {}
+ , partial = opts.partialWord
+ , ipartial = partial.toLowerCase()
+ , plen = partial.length
+
+ // get the batch of data that matches so far.
+ // this is an example of using npm.commands.search programmatically
+ // to fetch data that has been filtered by a set of arguments.
+ search(opts.conf.argv.remain.slice(2), true, function (er, data) {
+ if (er) return cb(er)
+ Object.keys(data).forEach(function (name) {
+ data[name].words.split(" ").forEach(function (w) {
+ if (w.toLowerCase().indexOf(ipartial) === 0) {
+ compl[partial + w.substr(plen)] = true
+ }
+ })
+ })
+ cb(null, Object.keys(compl))
+ })
+}
+
+function search (args, silent, staleness, cb_) {
+ if (typeof cb_ !== "function") cb_ = staleness, staleness = 600
+ if (typeof cb_ !== "function") cb_ = silent, silent = false
+
+ var searchopts = npm.config.get("searchopts")
+ , searchexclude = npm.config.get("searchexclude")
+ if (typeof searchopts !== "string") searchopts = ""
+ searchopts = searchopts.split(/\s+/)
+ if (typeof searchexclude === "string") {
+ searchexclude = searchexclude.split(/\s+/)
+ } else searchexclude = []
+ var opts = searchopts.concat(args).map(function (s) {
+ return s.toLowerCase()
+ }).filter(function (s) { return s })
+ searchexclude = searchexclude.map(function (s) {
+ return s.toLowerCase()
+ })
+ getFilteredData( staleness, opts, searchexclude, function (er, data) {
+ // now data is the list of data that we want to show.
+ // prettify and print it, and then provide the raw
+ // data to the cb.
+ if (er || silent) return cb_(er, data)
+ function cb (er) { return cb_(er, data) }
+ output = output || require("./utils/output.js")
+ output.write(prettify(data, args), cb)
+ })
+}
+
+function getFilteredData (staleness, args, notArgs, cb) {
+ registry.get( "/-/all", null, staleness, false
+ , true, function (er, data) {
+ if (er) return cb(er)
+ return cb(null, filter(data, args, notArgs))
+ })
+}
+
+function filter (data, args, notArgs) {
+ // data={<name>:{package data}}
+ return Object.keys(data).map(function (d) {
+ return data[d]
+ }).filter(function (d) {
+ return typeof d === "object"
+ }).map(stripData).map(getWords).filter(function (data) {
+ return filterWords(data, args, notArgs)
+ }).reduce(function (l, r) {
+ l[r.name] = r
+ return l
+ }, {})
+}
+
+function stripData (data) {
+ return { name:data.name
+ , description:npm.config.get("description") ? data.description : ""
+ , maintainers:(data.maintainers || []).map(function (m) {
+ return "=" + m.name
+ })
+ , url:!Object.keys(data.versions || {}).length ? data.url : null
+ , keywords:data.keywords || []
+ }
+}
+
+function getWords (data) {
+ data.words = [ data.name ]
+ .concat(data.description)
+ .concat(data.maintainers)
+ .concat(data.url && ("<" + data.url + ">"))
+ .concat(data.keywords)
+ .map(function (f) { return f && f.trim && f.trim() })
+ .filter(function (f) { return f })
+ .join(" ")
+ .toLowerCase()
+ return data
+}
+
+function filterWords (data, args, notArgs) {
+ var words = data.words
+ for (var i = 0, l = args.length; i < l; i ++) {
+ if (words.indexOf(args[i]) === -1) {
+ return false
+ }
+ }
+ for (var i = 0, l = notArgs.length; i < l; i ++) {
+ if (words.indexOf(notArgs[i]) !== -1) return false
+ }
+ return true
+}
+
+function prettify (data, args) {
+ try {
+ var tty = require("tty")
+ , stdout = process.stdout
+ , cols = !tty.isatty(stdout.fd) ? Infinity
+ : stdout._handle ? stdout._handle.getWindowSize()[0]
+ : tty.getWindowSize()[1]
+ } catch (ex) { cols = Infinity }
+
+ // name, desc, author, keywords
+ var longest = []
+ , spaces
+ , maxLen = [20, 60, 20, Infinity]
+ , headings = ["NAME", "DESCRIPTION", "AUTHOR", "KEYWORDS"]
+ , lines
+
+ lines = Object.keys(data).map(function (d) {
+ return data[d]
+ }).filter(function (data) {
+ return data.name
+ }).map(function (data) {
+ // turn a pkg data into a string
+ // [name,who,desc,targets,keywords] tuple
+ // also set longest to the longest name
+ if (typeof data.keywords === "string") {
+ data.keywords = data.keywords.split(/[,\s]+/)
+ }
+ if (!Array.isArray(data.keywords)) data.keywords = []
+ var l = [ data.name
+ , data.description || ""
+ , data.maintainers.join(" ")
+ , (data.keywords || []).join(" ")
+ ]
+ l.forEach(function (s, i) {
+ var len = s.length
+ longest[i] = Math.min(maxLen[i] || Infinity
+ ,Math.max(longest[i] || 0, len))
+ if (len > longest[i]) {
+ l._undent = l._undent || []
+ l._undent[i] = len - longest[i]
+ }
+ l[i] = l[i].replace(/\s+/g, " ")
+ })
+ return l
+ }).map(function (line) {
+ return line.map(function (s, i) {
+ spaces = spaces || longest.map(function (n) {
+ return new Array(n + 2).join(" ")
+ })
+ var len = s.length
+ if (line._undent && line._undent[i - 1]) {
+ len += line._undent[i - 1] - 1
+ }
+ return s + spaces[i].substr(len)
+ }).join(" ").substr(0, cols).trim()
+ }).sort(function (a, b) {
+ return a === b ? 0 : a > b ? 1 : -1
+ }).map(function (line) {
+ // colorize!
+ args.forEach(function (arg, i) {
+ line = addColorMarker(line, arg, i)
+ })
+ return colorize(line).trim()
+ })
+
+ if (lines.length === 0) {
+ return "No match found for "+(args.map(JSON.stringify).join(" "))
+ }
+
+ // build the heading padded to the longest in each field
+ return headings.map(function (h, i) {
+ var space = Math.max(2, 3 + (longest[i] || 0) - h.length)
+ return h + (new Array(space).join(" "))
+ }).join("").substr(0, cols).trim() + "\n" + lines.join("\n")
+
+}
+
+var colors = [31, 33, 32, 36, 34, 35 ]
+ , cl = colors.length
+function addColorMarker (str, arg, i) {
+ var m = i % cl + 1
+ , markStart = String.fromCharCode(m)
+ , markEnd = String.fromCharCode(0)
+ , pieces = str.toLowerCase().split(arg.toLowerCase())
+ , p = 0
+ return pieces.map(function (piece, i) {
+ piece = str.substr(p, piece.length)
+ var mark = markStart
+ + str.substr(p+piece.length, arg.length)
+ + markEnd
+ p += piece.length + arg.length
+ return piece + mark
+ }).join("")
+ return str.split(arg).join(mark)
+}
+function colorize (line) {
+ for (var i = 0; i < cl; i ++) {
+ var m = i + 1
+ line = line.split(String.fromCharCode(m)).join("\033["+colors[i]+"m")
+ }
+ return line.split("\u0000").join("\033[0m")
+}
diff --git a/deps/npm/lib/set.js b/deps/npm/lib/set.js
new file mode 100644
index 0000000000..d821095157
--- /dev/null
+++ b/deps/npm/lib/set.js
@@ -0,0 +1,12 @@
+
+module.exports = set
+
+set.usage = "npm set <key> <value> (See `npm config`)"
+
+var npm = require("./npm.js")
+
+set.completion = npm.commands.config.completion
+
+function set (args, cb) {
+ npm.commands.config(["set"].concat(args), cb)
+}
diff --git a/deps/npm/lib/star.js b/deps/npm/lib/star.js
new file mode 100644
index 0000000000..d84fa02ed4
--- /dev/null
+++ b/deps/npm/lib/star.js
@@ -0,0 +1,34 @@
+
+module.exports = star
+
+var npm = require("./npm.js")
+ , registry = require("./utils/npm-registry-client/index.js")
+ , log = require("./utils/log.js")
+ , asyncMap = require("slide").asyncMap
+ , output = require("./utils/output.js")
+
+star.usage = "npm star <package> [pkg, pkg, ...]\n"
+ + "npm unstar <package> [pkg, pkg, ...]"
+
+star.completion = function (opts, cb) {
+ registry.get("/-/short", null, 60000, function (er, list) {
+ return cb(null, list || [])
+ })
+}
+
+function star (args, cb) {
+ if (!args.length) return cb(star.usage)
+ var s = npm.config.get("unicode") ? "\u2605 " : "(*)"
+ , u = npm.config.get("unicode") ? "\u2606 " : "( )"
+ , using = !(npm.command.match(/^un/))
+ if (!using) s = u
+ asyncMap(args, function (pkg, cb) {
+ registry.star(pkg, using, function (er, data, raw, req) {
+ if (!er) {
+ output.write(s + " "+pkg, npm.config.get("outfd"))
+ log.verbose(data, "back from star/unstar")
+ }
+ cb(er, data, raw, req)
+ })
+ }, cb)
+}
diff --git a/deps/npm/lib/start.js b/deps/npm/lib/start.js
new file mode 100644
index 0000000000..98823825bb
--- /dev/null
+++ b/deps/npm/lib/start.js
@@ -0,0 +1 @@
+module.exports = require("./utils/lifecycle.js").cmd("start")
diff --git a/deps/npm/lib/stop.js b/deps/npm/lib/stop.js
new file mode 100644
index 0000000000..8ea5ba6aa6
--- /dev/null
+++ b/deps/npm/lib/stop.js
@@ -0,0 +1 @@
+module.exports = require("./utils/lifecycle.js").cmd("stop")
diff --git a/deps/npm/lib/submodule.js b/deps/npm/lib/submodule.js
new file mode 100644
index 0000000000..92fb41f883
--- /dev/null
+++ b/deps/npm/lib/submodule.js
@@ -0,0 +1,93 @@
+// npm submodule <pkg>
+// Check the package contents for a git repository url.
+// If there is one, then create a git submodule in the node_modules folder.
+
+module.exports = submodule
+
+var npm = require("./npm.js")
+ , readJson = require("./utils/read-json.js")
+ , exec = require("./utils/exec.js")
+ , output = require("./utils/output.js")
+ , cache = require("./cache.js")
+ , asyncMap = require("slide").asyncMap
+ , chain = require("slide").chain
+
+submodule.usage = "npm submodule <pkg>"
+
+submodule.completion = require("./docs.js").completion
+
+function submodule (args, cb) {
+ if (npm.config.get("global")) {
+ return cb(new Error("Cannot use submodule command in global mode."))
+ }
+
+ if (args.length === 0) return cb(submodule.usage)
+
+ asyncMap(args, function (arg, cb) {
+ cache.add(arg, cb)
+ }, function (er, pkgs) {
+ if (er) return cb(er)
+ chain(pkgs.map(function (pkg) { return function (cb) {
+ submodule_(pkg, cb)
+ }}), cb)
+ })
+
+}
+
+function submodule_ (pkg, cb) {
+ if (!pkg.repository
+ || pkg.repository.type !== "git"
+ || !pkg.repository.url) {
+ return cb(new Error(pkg._id + ": No git repository listed"))
+ }
+
+ // prefer https:// github urls
+ pkg.repository.url = pkg.repository.url
+ .replace(/^(git:\/\/)?(git@)?github.com[:\/]/, "https://github.com/")
+
+ // first get the list of submodules, and update if it's already there.
+ getSubmodules(function (er, modules) {
+ if (er) return cb(er)
+ // if there's already a submodule, then just update it.
+ if (modules.indexOf(pkg.name) !== -1) {
+ return updateSubmodule(pkg.name, cb)
+ }
+ addSubmodule(pkg.name, pkg.repository.url, cb)
+ })
+}
+
+function updateSubmodule (name, cb) {
+ exec( "git", [ "submodule", "update", "--init"
+ , "node_modules/" + name ]
+ , null, true, npm.prefix, cb)
+}
+
+function addSubmodule (name, url, cb) {
+ exec( "git", [ "submodule", "add", url
+ , "node_modules/" + name ]
+ , null, true, npm.prefix, function (er) {
+ if (er) return cb(er)
+ updateSubmodule(name, cb)
+ })
+}
+
+
+var getSubmodules = function getSubmodules (cb) {
+ exec( "git", ["submodule", "status"], null, false
+ , npm.prefix, function (er, code, stdout, stderr) {
+ if (er) return cb(er)
+ res = stdout.trim().split(/\n/).map(function (line) {
+ return line.trim().split(/\s+/)[1]
+ }).filter(function (line) {
+ // only care about submodules in the node_modules folder.
+ return line && line.match(/^node_modules\//)
+ }).map(function (line) {
+ return line.replace(/^node_modules\//g, "")
+ })
+
+ // memoize.
+ getSubmodules = function (cb) { return cb(null, res) }
+
+ cb(null, res)
+ })
+}
diff --git a/deps/npm/lib/tag.js b/deps/npm/lib/tag.js
new file mode 100644
index 0000000000..06aa70c5e0
--- /dev/null
+++ b/deps/npm/lib/tag.js
@@ -0,0 +1,17 @@
+// turns out tagging isn't very complicated
+// all the smarts are in the couch.
+module.exports = tag
+tag.usage = "npm tag <project>@<version> [<tag>]"
+
+tag.completion = require("./unpublish.js").completion
+
+var npm = require("./npm.js")
+
+function tag (args, cb) {
+ var thing = (args.shift() || "").split("@")
+ , project = thing.shift()
+ , version = thing.join("@")
+ , t = args.shift() || npm.config.get("tag")
+ if (!project || !version || !t) return cb("Usage:\n"+tag.usage)
+ require("./utils/npm-registry-client/index.js").tag(project, version, t, cb)
+}
diff --git a/deps/npm/lib/test.js b/deps/npm/lib/test.js
new file mode 100644
index 0000000000..1d2ee30a32
--- /dev/null
+++ b/deps/npm/lib/test.js
@@ -0,0 +1 @@
+module.exports = require("./utils/lifecycle.js").cmd("test")
diff --git a/deps/npm/lib/unbuild.js b/deps/npm/lib/unbuild.js
new file mode 100644
index 0000000000..f4bd008a66
--- /dev/null
+++ b/deps/npm/lib/unbuild.js
@@ -0,0 +1,85 @@
+module.exports = unbuild
+unbuild.usage = "npm unbuild <folder>\n(this is plumbing)"
+
+var readJson = require("./utils/read-json.js")
+ , rm = require("rimraf")
+ , npm = require("./npm.js")
+ , path = require("path")
+ , fs = require("graceful-fs")
+ , lifecycle = require("./utils/lifecycle.js")
+ , asyncMap = require("slide").asyncMap
+ , chain = require("slide").chain
+ , log = require("./utils/log.js")
+ , build = require("./build.js")
+
+// args is a list of folders.
+// remove any bins/etc, and then delete the folder.
+function unbuild (args, cb) { asyncMap(args, unbuild_, cb) }
+
+function unbuild_ (folder, cb) {
+ folder = path.resolve(folder)
+ delete build._didBuild[folder]
+ log.info(folder, "unbuild")
+ readJson(path.resolve(folder, "package.json"), function (er, pkg) {
+ // if no json, then just trash it, but no scripts or whatever.
+ if (er) return rm(folder, cb)
+ readJson.clearCache(folder)
+ chain
+ ( [ [lifecycle, pkg, "preuninstall", folder, false, true]
+ , [lifecycle, pkg, "uninstall", folder, false, true]
+ , [rmStuff, pkg, folder]
+ , [lifecycle, pkg, "postuninstall", folder, false, true]
+ , [rm, folder] ]
+ , cb )
+ })
+}
+
+function rmStuff (pkg, folder, cb) {
+ // if it's global, and folder is in {prefix}/node_modules,
+ // then bins are in {prefix}/bin
+ // otherwise, then bins are in folder/../.bin
+ var parent = path.dirname(folder)
+ , gnm = npm.dir
+ , top = gnm === parent
+
+ log.verbose([top, gnm, parent], "unbuild "+pkg._id)
+ asyncMap([rmBins, rmMans], function (fn, cb) {
+ fn(pkg, folder, parent, top, cb)
+ }, cb)
+}
+
+function rmBins (pkg, folder, parent, top, cb) {
+ if (!pkg.bin) return cb()
+ var binRoot = top ? npm.bin : path.resolve(parent, ".bin")
+ log.verbose([binRoot, pkg.bin], "binRoot")
+ asyncMap(Object.keys(pkg.bin), function (b, cb) {
+ if (process.platform === "win32") {
+ rm(path.resolve(binRoot, b) + ".cmd", cb)
+ } else {
+ rm( path.resolve(binRoot, b)
+ , { gently: !npm.config.get("force") && folder }
+ , cb )
+ }
+ }, cb)
+}
+
+function rmMans (pkg, folder, parent, top, cb) {
+ if (!pkg.man || !top || process.platform === "win32") return cb()
+ var manRoot = path.resolve(npm.config.get("prefix"), "share", "man")
+ asyncMap(pkg.man, function (man, cb) {
+ var parseMan = man.match(/(.*)\.([0-9]+)(\.gz)?$/)
+ , stem = parseMan[1]
+ , sxn = parseMan[2]
+ , gz = parseMan[3] || ""
+ , bn = path.basename(stem)
+ , manDest = path.join( manRoot
+ , "man"+sxn
+ , (bn.indexOf(pkg.name) === 0 ? bn
+ : pkg.name + "-" + bn)
+ + "." + sxn + gz
+ )
+ rm( manDest
+ , { gently: !npm.config.get("force") && folder }
+ , cb )
+ }, cb)
+}
diff --git a/deps/npm/lib/uninstall.js b/deps/npm/lib/uninstall.js
new file mode 100644
index 0000000000..1b43607d6d
--- /dev/null
+++ b/deps/npm/lib/uninstall.js
@@ -0,0 +1,55 @@
+
+// remove a package.
+
+module.exports = uninstall
+
+uninstall.usage = "npm uninstall <name>[@<version> [<name>[@<version>] ...]"
+ + "\nnpm rm <name>[@<version> [<name>[@<version>] ...]"
+
+uninstall.completion = require("./utils/completion/installed-shallow.js")
+
+var fs = require("graceful-fs")
+ , log = require("./utils/log.js")
+ , readJson = require("./utils/read-json.js")
+ , path = require("path")
+ , npm = require("./npm.js")
+ , semver = require("semver")
+ , asyncMap = require("slide").asyncMap
+
+function uninstall (args, cb) {
+ // this is super easy
+ // get the list of args that correspond to package names in either
+ // the global npm.dir,
+ // then call unbuild on all those folders to pull out their bins
+ // and mans and whatnot, and then delete the folder.
+
+ var nm = npm.dir
+ if (args.length === 1 && args[0] === ".") args = []
+ if (args.length) return uninstall_(args, nm, cb)
+
+ // remove this package from the global space, if it's installed there
+ if (npm.config.get("global")) return cb(uninstall.usage)
+ readJson(path.resolve(npm.prefix, "package.json"), function (er, pkg) {
+ if (er) return cb(uninstall.usage)
+ uninstall_( [pkg.name]
+ , npm.dir
+ , cb )
+ })
+
+}
+
+function uninstall_ (args, nm, cb) {
+ asyncMap(args, function (arg, cb) {
+ var p = path.resolve(nm, arg)
+ fs.lstat(p, function (er) {
+ if (er) {
+ log.warn(arg, "Not installed in "+nm)
+ return cb(null, [])
+ }
+ cb(null, p)
+ })
+ }, function (er, folders) {
+ if (er) return cb(er)
+ asyncMap(folders, npm.commands.unbuild, cb)
+ })
+}
diff --git a/deps/npm/lib/unpublish.js b/deps/npm/lib/unpublish.js
new file mode 100644
index 0000000000..2945a7887f
--- /dev/null
+++ b/deps/npm/lib/unpublish.js
@@ -0,0 +1,69 @@
+
+module.exports = unpublish
+
+var registry = require("./utils/npm-registry-client/index.js")
+ , log = require("./utils/log.js")
+ , npm = require("./npm.js")
+ , readJson = require("./utils/read-json.js")
+ , path = require("path")
+ , output = require("./utils/output.js")
+
+unpublish.usage = "npm unpublish <project>[@<version>]"
+
+unpublish.completion = function (opts, cb) {
+ if (opts.conf.argv.remain.length >= 3) return cb()
+ var un = encodeURIComponent(npm.config.get("username"))
+ if (!un) return cb()
+ registry.get("/-/by-user/"+un, function (er, pkgs) {
+ // do a bit of filtering at this point, so that we don't need
+ // to fetch versions for more than one thing, but also don't
+ // accidentally a whole project.
+ pkgs = pkgs[un]
+ if (!pkgs || !pkgs.length) return cb()
+ var partial = opts.partialWord.split("@")
+ , pp = partial.shift()
+ , pv = partial.join("@")
+ pkgs = pkgs.filter(function (p) {
+ return p.indexOf(pp) === 0
+ })
+ if (pkgs.length > 1) return cb(null, pkgs)
+ registry.get(pkgs[0], function (er, d) {
+ if (er) return cb(er)
+ var vers = Object.keys(d.versions)
+ if (!vers.length) return cb(null, pkgs)
+ return cb(null, vers.map(function (v) {
+ return pkgs[0]+"@"+v
+ }))
+ })
+ })
+}
+
+function unpublish (args, cb) {
+ var thing = args.length ? args.shift().split("@") : []
+ , project = thing.shift()
+ , version = thing.join("@")
+
+ if (!project || path.resolve(project) === npm.prefix) {
+ // if there's a package.json in the current folder, then
+ // read the package name and version out of that.
+ var cwdJson = path.join(process.cwd(), "package.json")
+ return readJson(cwdJson, function (er, data) {
+ if (er) return cb("Usage:\n"+unpublish.usage)
+ gotProject(data.name, data.version, cb)
+ })
+ }
+ return gotProject(project, version, cb)
+}
+
+function gotProject (project, version, cb_) {
+ function cb (er) {
+ if (er) return cb_(er)
+ output.write("- " + project + (version ? "@" + version : ""), cb_)
+ }
+
+ // remove from the cache first
+ npm.commands.cache(["clean", project, version], function (er) {
+ if (er) return log.er(cb, "Failed to clean cache")(er)
+ registry.unpublish(project, version, cb)
+ })
+}
diff --git a/deps/npm/lib/update.js b/deps/npm/lib/update.js
new file mode 100644
index 0000000000..69b9f98e83
--- /dev/null
+++ b/deps/npm/lib/update.js
@@ -0,0 +1,39 @@
+/*
+for each pkg in prefix that isn't a git repo
+ look for a new version of pkg that satisfies dep
+ if so, install it.
+ if not, then update it
+*/
+
+module.exports = update
+
+update.usage = "npm update [pkg]"
+
+var npm = require("./npm.js")
+ , lifecycle = require("./utils/lifecycle.js")
+ , asyncMap = require("slide").asyncMap
+ , log = require("./utils/log.js")
+
+ // load these, just so that we know that they'll be available, in case
+ // npm itself is getting overwritten.
+ , install = require("./install.js")
+ , build = require("./build.js")
+
+update.completion = npm.commands.outdated.completion
+
+function update (args, cb) {
+ npm.commands.outdated(args, true, function (er, outdated) {
+ log(outdated, "outdated updating")
+ if (er) return cb(er)
+
+ asyncMap(outdated, function (ww, cb) {
+ // [[ dir, dep, has, want ]]
+ var where = ww[0]
+ , dep = ww[1]
+ , want = ww[3]
+ , what = dep + "@" + want
+
+ npm.commands.install(where, what, cb)
+ }, cb)
+ })
+}
diff --git a/deps/npm/lib/utils/cmd-shim.js b/deps/npm/lib/utils/cmd-shim.js
new file mode 100644
index 0000000000..802fdb8fc3
--- /dev/null
+++ b/deps/npm/lib/utils/cmd-shim.js
@@ -0,0 +1,98 @@
+// XXX Todo:
+// On windows, create a .cmd file.
+// Read the #! in the file to see what it uses. The vast majority
+// of the time, this will be either:
+// "#!/usr/bin/env <prog> <args...>"
+// or:
+// "#!<prog> <args...>"
+//
+// Write a binroot/pkg.bin + ".cmd" file that has this line in it:
+// @<prog> <args...> %~dp0<target> %*
+
+module.exports = cmdShim
+cmdShim.ifExists = cmdShimIfExists
+
+var fs = require("graceful-fs")
+ , chain = require("slide").chain
+ , mkdir = require("./mkdir-p.js")
+ , rm = require("rimraf")
+ , log = require("./log.js")
+ , path = require("path")
+ , relativize = require("./relativize.js")
+ , npm = require("../npm.js")
+ , shebangExpr = /^#\!(?:\/usr\/bin\/env )?([^ \t]+)(.*)$/
+
+function cmdShimIfExists (from, to, cb) {
+ fs.stat(from, function (er) {
+ if (er) return cb()
+ cmdShim(from, to, cb)
+ })
+}
+
+function cmdShim (from, to, cb) {
+ if (process.platform !== "win32") {
+ return cb(new Error(".cmd shims only should be used on windows"))
+ }
+
+ chain
+ ( [ [fs, "stat", from]
+ , [rm, to + ".cmd"]
+ , [mkdir, path.dirname(to)]
+ , [writeShim, from, to] ]
+ , cb )
+}
+
+function writeShim (from, to, cb) {
+ // make a cmd file
+ // First, check if the bin is a #! of some sort.
+ // If not, then assume it's something that'll be compiled, or some other
+ // sort of script, and just call it directly.
+ fs.readFile(from, "utf8", function (er, data) {
+ if (er) return writeShim_(from, to, null, null, cb)
+ var firstLine = data.trim().split(/\r*\n/)[0]
+ , shebang = firstLine.match(shebangExpr)
+ if (!shebang) return writeShim_(from, to, null, null, cb)
+ var prog = shebang[1]
+ , args = shebang[2] || ""
+ return writeShim_(from, to, prog, args, cb)
+ })
+}
+
+function writeShim_ (from, to, prog, args, cb) {
+ var target = relativize(from, to).split("/").join("\\")
+ , longProg
+ args = args || ""
+ if (!prog) {
+ prog = "\"%~dp0\\" + target + "\""
+ args = ""
+ target = ""
+ } else {
+ longProg = "\"%~dp0\"\\\"" + prog + ".exe\""
+ target = "\"%~dp0\\" + target + "\""
+ }
+
+ // @IF EXIST "%~dp0"\"node.exe" (
+ // "%~dp0\node.exe" "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+ // ) ELSE (
+ // node "%~dp0\.\node_modules\npm\bin\npm-cli.js" %*
+ // )
+ var cmd
+ if (longProg) {
+ cmd = "@IF EXIST " + longProg + " (\r\n"
+ + " " + longProg + " " + args + " " + target + " %*\r\n"
+ + ") ELSE (\r\n"
+ + " " + prog + " " + args + " " + target + " %*\r\n"
+ + ")"
+ } else {
+ cmd = prog + " " + args + " " + target + " %*\r\n"
+ }
+
+ cmd = ":: Created by npm, please don't edit manually.\r\n" + cmd
+
+ fs.writeFile(to + ".cmd", cmd, "utf8", function (er) {
+ if (er) {
+ log.warn("Could not write "+to+".cmd", "cmdShim")
+ }
+ cb(er)
+ })
+}
diff --git a/deps/npm/lib/utils/completion.sh b/deps/npm/lib/utils/completion.sh
new file mode 100755
index 0000000000..f19046b198
--- /dev/null
+++ b/deps/npm/lib/utils/completion.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+###-begin-npm-completion-###
+#
+# npm command completion script
+#
+# Installation: npm completion >> ~/.bashrc (or ~/.zshrc)
+# Or, maybe: npm completion > /usr/local/etc/bash_completion.d/npm
+#
+
+COMP_WORDBREAKS=${COMP_WORDBREAKS/=/}
+COMP_WORDBREAKS=${COMP_WORDBREAKS/@/}
+export COMP_WORDBREAKS
+
+if complete &>/dev/null; then
+ _npm_completion () {
+ local si="$IFS"
+ IFS=$'\n' COMPREPLY=($(COMP_CWORD="$COMP_CWORD" \
+ COMP_LINE="$COMP_LINE" \
+ COMP_POINT="$COMP_POINT" \
+ npm completion -- "${COMP_WORDS[@]}" \
+ 2>/dev/null)) || return $?
+ IFS="$si"
+ }
+ complete -F _npm_completion npm
+elif compctl &>/dev/null; then
+ _npm_completion () {
+ local cword line point words si
+ read -Ac words
+ read -cn cword
+ let cword-=1
+ read -l line
+ read -ln point
+ si="$IFS"
+ IFS=$'\n' reply=($(COMP_CWORD="$cword" \
+ COMP_LINE="$line" \
+ COMP_POINT="$point" \
+ npm completion -- "${words[@]}" \
+ 2>/dev/null)) || return $?
+ IFS="$si"
+ }
+ compctl -K _npm_completion npm
+fi
+###-end-npm-completion-###
diff --git a/deps/npm/lib/utils/completion/file-completion.js b/deps/npm/lib/utils/completion/file-completion.js
new file mode 100644
index 0000000000..427efefb44
--- /dev/null
+++ b/deps/npm/lib/utils/completion/file-completion.js
@@ -0,0 +1,29 @@
+module.exports = fileCompletion
+
+var find = require("../find.js")
+ , mkdir = require("../mkdir-p.js")
+ , path = require("path")
+
+function fileCompletion (root, req, depth, cb) {
+ if (typeof cb !== "function") cb = depth, depth = Infinity
+ mkdir(root, function (er) {
+ if (er) return cb(er)
+ function dirFilter (f, type) {
+ // return anything that is a file,
+ // or not exactly the req.
+ return type !== "dir" ||
+ ( f && f !== path.join(root, req)
+ && f !== path.join(root, req) + "/" )
+ }
+ find(path.join(root, req), dirFilter, depth, function (er, files) {
+ if (er) return cb(er)
+ return cb(null, (files || []).map(function (f) {
+ return path.join(req, f.substr(root.length + 1)
+ .substr((f === req ? path.dirname(req)
+ : req).length)
+ .replace(/^\//, ""))
+ }))
+ })
+ })
+}
+
diff --git a/deps/npm/lib/utils/completion/installed-deep.js b/deps/npm/lib/utils/completion/installed-deep.js
new file mode 100644
index 0000000000..1188f408fa
--- /dev/null
+++ b/deps/npm/lib/utils/completion/installed-deep.js
@@ -0,0 +1,46 @@
+module.exports = installedDeep
+
+var npm = require("../../npm.js")
+ , readInstalled = require("../read-installed.js")
+
+function installedDeep (opts, cb) {
+ var local
+ , global
+ if (npm.config.get("global")) local = [], next()
+ else readInstalled(npm.prefix, function (er, data) {
+ local = getNames(data || {})
+ next()
+ })
+ readInstalled(npm.config.get("prefix"), function (er, data) {
+ global = getNames(data || {})
+ next()
+ })
+
+ function getNames_ (d, n) {
+ if (d.realName && n) {
+ if (n[d.realName]) return n
+ n[d.realName] = true
+ }
+ if (!n) n = {}
+ Object.keys(d.dependencies || {}).forEach(function (dep) {
+ getNames_(d.dependencies[dep], n)
+ })
+ return n
+ }
+ function getNames (d) {
+ return Object.keys(getNames_(d))
+ }
+
+ function next () {
+ if (!local || !global) return
+ if (!npm.config.get("global")) {
+ global = global.map(function (g) {
+ return [g, "-g"]
+ })
+ }
+ var names = local.concat(global)
+ return cb(null, names)
+ }
+
+}
+
diff --git a/deps/npm/lib/utils/completion/installed-shallow.js b/deps/npm/lib/utils/completion/installed-shallow.js
new file mode 100644
index 0000000000..1ee3cce66c
--- /dev/null
+++ b/deps/npm/lib/utils/completion/installed-shallow.js
@@ -0,0 +1,79 @@
+
+module.exports = installedShallow
+
+var npm = require("../../npm.js")
+ , fs = require("graceful-fs")
+ , path = require("path")
+ , readJson = require("../read-json.js")
+ , asyncMap = require("slide").asyncMap
+
+function installedShallow (opts, filter, cb) {
+ if (typeof cb !== "function") cb = filter, filter = null
+ var conf = opts.conf
+ , args = conf.argv.remain
+ if (args.length > 3) return cb()
+ var local
+ , global
+ , localDir = npm.dir
+ , globalDir = npm.globalDir
+ if (npm.config.get("global")) local = [], next()
+ else fs.readdir(localDir, function (er, pkgs) {
+ local = (pkgs || []).filter(function (p) {
+ return p.charAt(0) !== "."
+ })
+ next()
+ })
+ fs.readdir(globalDir, function (er, pkgs) {
+ global = (pkgs || []).filter(function (p) {
+ return p.charAt(0) !== "."
+ })
+ next()
+ })
+ function next () {
+ if (!local || !global) return
+ filterInstalled(local, global, filter, cb)
+ }
+}
+
+function filterInstalled (local, global, filter, cb) {
+ var fl
+ , fg
+
+ if (!filter) {
+ fl = local
+ fg = global
+ return next()
+ }
+
+ asyncMap(local, function (p, cb) {
+ readJson(path.join(npm.dir, p, "package.json"), function (er, d) {
+ if (!d || !filter(d)) return cb(null, [])
+ return cb(null, d.name)
+ })
+ }, function (er, local) {
+ fl = local || []
+ next()
+ })
+
+ var globalDir = npm.globalDir
+ asyncMap(global, function (p, cb) {
+ readJson(path.join(globalDir, p, "package.json"), function (er, d) {
+ if (!d || !filter(d)) return cb(null, [])
+ return cb(null, d.name)
+ })
+ }, function (er, global) {
+ fg = global || []
+ next()
+ })
+
+ function next () {
+ if (!fg || !fl) return
+ if (!npm.config.get("global")) {
+ fg = fg.map(function (g) {
+ return [g, "-g"]
+ })
+ }
+ console.error("filtered", fl, fg)
+ return cb(null, fl.concat(fg))
+ }
+}
diff --git a/deps/npm/lib/utils/completion/remote-packages.js b/deps/npm/lib/utils/completion/remote-packages.js
new file mode 100644
index 0000000000..4bf82d070a
--- /dev/null
+++ b/deps/npm/lib/utils/completion/remote-packages.js
@@ -0,0 +1,57 @@
+
+module.exports = remotePackages
+
+var registry = require("../npm-registry-client/index.js")
+ , containsSingleMatch = require("./contains-single-match.js")
+ , getCompletions = require("./get-completions.js")
+
+/*
+ Looks up remote packages for CLI tab-completion.
+
+ NOTE: If doVersion is true, versions in the form <name>@<version>
+ will be completed.
+
+ If doTag is true, tags in the form <name>@<tag> will be
+ completed.
+
+  If recurring is true, sequences of multiple packages can be
+ completed. i.e. for schemes such as:
+    npm <command> <name>[@<version>] [<name>[@<version>]] ...
+*/
+function remotePackages (args, index, doVersion, doTag
+ , recurring, cb) {
+ if (recurring || index < 3) {
+ var name = (args.length + 1 === index) ? args[args.length - 1] : ""
+ if (name === undefined) name = ""
+ if (name.indexOf("/") !== -1) return cb(null, [])
+    // use up to 1 hour stale cache. not super urgent.
+ registry.get("/", null, 3600, function (er, d) {
+ if (er) return cb(er)
+ var remoteList = Object.keys(d)
+ , found = remoteList.indexOf(name)
+ , unique = found && containsSingleMatch(name, remoteList)
+ , simpleMatches = getCompletions(name, remoteList)
+ , uniqueMatch = unique && simpleMatches[0]
+ , addTag = doTag && (unique || found || name.indexOf("@") !== -1)
+ , addVer = doVersion && (unique || found || name.indexOf("@") !== -1)
+ , list = []
+ , pieces = (uniqueMatch || name).split("@")
+ , pkgname = pieces[0]
+ , extras = []
+ if (unique && !addTag && !addVer) return cb(null, [uniqueMatch])
+ if (d[pkgname] && (addTag || addVer)) {
+ if (d[pkgname].versions && addVer) {
+ extras = extras.concat(Object.keys(d[pkgname].versions))
+ }
+ if (d[pkgname]["dist-tags"] && addTag) {
+ extras = extras.concat(Object.keys(d[pkgname]["dist-tags"]))
+ }
+ list = getCompletions(name, list.concat(extras.map(function (e) {
+ return pkgname + "@" + e
+ })))
+ }
+ if (!unique) list = list.concat(getCompletions(name, remoteList))
+ return cb(null, list)
+ })
+ }
+}
diff --git a/deps/npm/lib/utils/completion/users.js b/deps/npm/lib/utils/completion/users.js
new file mode 100644
index 0000000000..f77312c94e
--- /dev/null
+++ b/deps/npm/lib/utils/completion/users.js
@@ -0,0 +1,22 @@
+
+module.exports = users
+
+var registry = require("../npm-registry-client/index.js")
+ , containsSingleMatch = require("./contains-single-match.js")
+ , getCompletions = require("./get-completions.js")
+ , log = require("../log.js")
+
+function users (args, index, cb) {
+ var name = (args.length + 1 === index) ? args[args.length - 1] : ""
+ if (name === undefined) name = ""
+  // use up to 1 day stale cache. doesn't change much
+ log.warn("About to fetch", "users completion")
+ registry.get("/-/users", null, 24*60*60, function (er, d) {
+ log.warn(d, "userdata")
+ log.warn(name, "name")
+ if (er) return cb(er)
+ var remoteList = Object.keys(d)
+ , simpleMatches = getCompletions(name, remoteList)
+ return cb(null, simpleMatches)
+ })
+}
diff --git a/deps/npm/lib/utils/config-defs.js b/deps/npm/lib/utils/config-defs.js
new file mode 100644
index 0000000000..55d5c887de
--- /dev/null
+++ b/deps/npm/lib/utils/config-defs.js
@@ -0,0 +1,299 @@
+// defaults, types, and shorthands.
+
+
+var path = require("path")
+ , url = require("url")
+ , Stream = require("stream").Stream
+ , semver = require("semver")
+ , stableFamily = semver.parse(process.version)
+ , os = require("os")
+ , nopt = require("nopt")
+ , log = require("./log.js")
+
+function Octal () {}
+function validateOctal (data, k, val) {
+ // must be either an integer or an octal string.
+ if (typeof val === "number") {
+ data[k] = "0" + val.toString(8)
+ }
+ if (typeof val === "string") {
+ if (val.charAt(0) !== "0" || isNaN(val)) return false
+ data[k] = "0" + parseInt(val, 8).toString(8)
+ }
+}
+
+function validateSemver (data, k, val) {
+ if (!semver.valid(val)) return false
+ data[k] = semver.valid(val)
+}
+
+nopt.typeDefs.semver = { type: semver, validate: validateSemver }
+nopt.typeDefs.Octal = { type: Octal, validate: validateOctal }
+
+nopt.invalidHandler = function (k, val, type, data) {
+ log.warn(k + "=" + JSON.stringify(val), "invalid config")
+
+ if (Array.isArray(type)) {
+ if (type.indexOf(url) !== -1) type = url
+ else if (type.indexOf(path) !== -1) type = path
+ }
+
+ switch (type) {
+ case Octal:
+ log.warn("Must be octal number, starting with 0", "invalid config")
+ break
+ case url:
+ log.warn("Must be a full url with 'http://'", "invalid config")
+ break
+ case path:
+ log.warn("Must be a valid filesystem path", "invalid config")
+ break
+ case Number:
+ log.warn("Must be a numeric value", "invalid config")
+ break
+ }
+}
+
+if (!stableFamily || (+stableFamily[2] % 2)) stableFamily = null
+else stableFamily = stableFamily[1] + "." + stableFamily[2]
+
+var httpsOk = semver.satisfies(process.version, ">=0.4.9")
+var winColor = semver.satisfies(process.version, ">=0.5.9")
+
+var defaults
+
+var temp = process.env.TMPDIR
+ || process.env.TMP
+ || process.env.TEMP
+ || ( process.platform === "win32"
+ ? "c:\\windows\\temp"
+ : "/tmp" )
+
+var home = ( process.platform === "win32"
+ ? process.env.USERPROFILE
+ : process.env.HOME ) || temp
+
+var globalPrefix
+Object.defineProperty(exports, "defaults", {get: function () {
+ if (defaults) return defaults
+
+ if (process.env.PREFIX) {
+ globalPrefix = process.env.PREFIX
+ } else if (process.platform === "win32") {
+ // c:\node\node.exe --> prefix=c:\node\
+ globalPrefix = path.join(process.execPath, "..")
+ } else {
+ // /usr/local/bin/node --> prefix=/usr/local
+ globalPrefix = path.join(process.execPath, "..", "..")
+
+ // destdir only is respected on Unix
+ if (process.env.DESTDIR) {
+ globalPrefix = process.env.DESTDIR + "/" + globalPrefix
+ }
+ }
+
+ return defaults =
+ { "always-auth" : false
+
+ // Disable bindist publishing for now. Too problematic.
+ // Revisit when we have a less crappy approach, or just make
+ // bindist be a thing that only dedicated build-farms will enable.
+ , "bin-publish" : false
+
+ , bindist : stableFamily
+ && ( stableFamily + "-"
+ + "ares" + process.versions.ares + "-"
+ + "ev" + process.versions.ev + "-"
+ + "openssl" + process.versions.openssl + "-"
+ + "v8" + process.versions.v8 + "-"
+ + process.platform + "-"
+ + (process.arch ? process.arch + "-" : "")
+ + os.release() )
+
+ // are there others?
+ , browser : process.platform === "darwin" ? "open"
+ : process.platform === "win32" ? "start"
+ : "google-chrome"
+
+ , ca : // the npm CA certificate.
+ "-----BEGIN CERTIFICATE-----\n"+
+ "MIIChzCCAfACCQDauvz/KHp8ejANBgkqhkiG9w0BAQUFADCBhzELMAkGA1UEBhMC\n"+
+ "VVMxCzAJBgNVBAgTAkNBMRAwDgYDVQQHEwdPYWtsYW5kMQwwCgYDVQQKEwNucG0x\n"+
+ "IjAgBgNVBAsTGW5wbSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxDjAMBgNVBAMTBW5w\n"+
+ "bUNBMRcwFQYJKoZIhvcNAQkBFghpQGl6cy5tZTAeFw0xMTA5MDUwMTQ3MTdaFw0y\n"+
+ "MTA5MDIwMTQ3MTdaMIGHMQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEDAOBgNV\n"+
+ "BAcTB09ha2xhbmQxDDAKBgNVBAoTA25wbTEiMCAGA1UECxMZbnBtIENlcnRpZmlj\n"+
+ "YXRlIEF1dGhvcml0eTEOMAwGA1UEAxMFbnBtQ0ExFzAVBgkqhkiG9w0BCQEWCGlA\n"+
+ "aXpzLm1lMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDLI4tIqPpRW+ACw9GE\n"+
+ "OgBlJZwK5f8nnKCLK629Pv5yJpQKs3DENExAyOgDcyaF0HD0zk8zTp+ZsLaNdKOz\n"+
+ "Gn2U181KGprGKAXP6DU6ByOJDWmTlY6+Ad1laYT0m64fERSpHw/hjD3D+iX4aMOl\n"+
+ "y0HdbT5m1ZGh6SJz3ZqxavhHLQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAC4ySDbC\n"+
+ "l7W1WpLmtLGEQ/yuMLUf6Jy/vr+CRp4h+UzL+IQpCv8FfxsYE7dhf/bmWTEupBkv\n"+
+ "yNL18lipt2jSvR3v6oAHAReotvdjqhxddpe5Holns6EQd1/xEZ7sB1YhQKJtvUrl\n"+
+ "ZNufy1Jf1r0ldEGeA+0ISck7s+xSh9rQD2Op\n"+
+ "-----END CERTIFICATE-----\n"
+
+ , cache : path.resolve( home || temp
+ , process.platform === "win32"
+ ? "npm-cache" : ".npm")
+ , color : process.platform !== "win32" || winColor
+ , depth: Infinity
+ , description : true
+ , dev : false
+ , editor : process.env.EDITOR ||
+ ( process.platform === "win32" ? "notepad" : "vi" )
+ , force : false
+ , global : false
+ , globalconfig : path.resolve(process.execPath, "..", "..", "etc", "npmrc")
+ , globalignorefile : path.resolve( process.execPath
+ , "..", "..", "etc", "npmignore")
+ , group : process.platform === "win32" ? 0
+ : process.env.SUDO_GID || (process.getgid && process.getgid())
+ , ignore: ""
+ , "init.version" : "0.0.0"
+ , "init.author.name" : ""
+ , "init.author.email" : ""
+ , "init.author.url" : ""
+ , link: false
+ , logfd : 2
+ , loglevel : "warn"
+ , logprefix : process.platform !== "win32" || winColor
+ , long : false
+ , message : "%s"
+ , "node-version" : process.version
+ , npaturl : "http://npat.npmjs.org/"
+ , npat : false
+ , "onload-script" : false
+ , outfd : 1
+ , parseable : false
+ , pre: false
+ , prefix : globalPrefix
+ , production: false
+ , proxy : process.env.HTTP_PROXY || process.env.http_proxy || null
+ , "https-proxy" : process.env.HTTPS_PROXY || process.env.https_proxy ||
+ process.env.HTTP_PROXY || process.env.http_proxy || null
+ , "rebuild-bundle" : true
+ , registry : "http" + (httpsOk ? "s" : "") + "://registry.npmjs.org/"
+ , rollback : true
+ , save : false
+ , searchopts: ""
+ , searchexclude: null
+ , shell : process.env.platform === "win32"
+ ? process.env.ComSpec || "cmd"
+ : process.env.SHELL || "bash"
+ , "strict-ssl": true
+ , tag : "latest"
+ , tmp : temp
+ , unicode : true
+ , "unsafe-perm" : process.platform === "win32"
+ || process.platform === "cygwin"
+ || !( process.getuid && process.setuid
+ && process.getgid && process.setgid )
+ || process.getuid() !== 0
+ , usage : false
+ , user : process.platform === "win32" ? 0 : "nobody"
+ , username : ""
+ , userconfig : path.resolve(home, ".npmrc")
+ , userignorefile : path.resolve(home, ".npmignore")
+ , umask: 022
+ , version : false
+ , viewer: process.platform === "win32" ? "browser" : "man"
+ , yes: null
+
+ , _exit : true
+ }
+}})
+
+exports.types =
+ { "always-auth" : Boolean
+ , "bin-publish" : Boolean
+ , bindist : [null, String]
+ , browser : String
+ , ca: [null, String]
+ , cache : path
+ , color : ["always", Boolean]
+ , depth : Number
+ , description : Boolean
+ , dev : Boolean
+ , editor : String
+ , force : Boolean
+ , global : Boolean
+ , globalconfig : path
+ , globalignorefile: path
+ , group : [Number, String]
+ , "https-proxy" : [null, url]
+ , ignore : String
+ , "init.version" : [null, semver]
+ , "init.author.name" : String
+ , "init.author.email" : String
+ , "init.author.url" : ["", url]
+ , link: Boolean
+ , logfd : [Number, Stream]
+ , loglevel : ["silent","win","error","warn","info","verbose","silly"]
+ , logprefix : Boolean
+ , long : Boolean
+ , message: String
+ , "node-version" : [null, semver]
+ , npaturl : url
+ , npat : Boolean
+ , "onload-script" : [null, String]
+ , outfd : [Number, Stream]
+ , parseable : Boolean
+ , pre: Boolean
+ , prefix: path
+ , production: Boolean
+ , proxy : [null, url]
+ , "rebuild-bundle" : Boolean
+ , registry : [null, url]
+ , rollback : Boolean
+ , save : Boolean
+ , searchopts : String
+ , searchexclude: [null, String]
+ , shell : String
+ , "strict-ssl": Boolean
+ , tag : String
+ , tmp : path
+ , unicode : Boolean
+ , "unsafe-perm" : Boolean
+ , usage : Boolean
+ , user : [Number, String]
+ , username : String
+ , userconfig : path
+ , userignorefile : path
+ , umask: Octal
+ , version : Boolean
+ , viewer: String
+ , yes: [false, null, Boolean]
+ , _exit : Boolean
+ }
+
+exports.shorthands =
+ { s : ["--loglevel", "silent"]
+ , d : ["--loglevel", "info"]
+ , dd : ["--loglevel", "verbose"]
+ , ddd : ["--loglevel", "silly"]
+ , noreg : ["--no-registry"]
+ , reg : ["--registry"]
+ , "no-reg" : ["--no-registry"]
+ , silent : ["--loglevel", "silent"]
+ , verbose : ["--loglevel", "verbose"]
+ , h : ["--usage"]
+ , H : ["--usage"]
+ , "?" : ["--usage"]
+ , help : ["--usage"]
+ , v : ["--version"]
+ , f : ["--force"]
+ , gangster : ["--force"]
+ , gangsta : ["--force"]
+ , desc : ["--description"]
+ , "no-desc" : ["--no-description"]
+ , "local" : ["--no-global"]
+ , l : ["--long"]
+ , m : ["--message"]
+ , p : ["--parseable"]
+ , porcelain : ["--parseable"]
+ , g : ["--global"]
+ , S : ["--save"]
+ , y : ["--yes"]
+ , n : ["--no-yes"]
+ }
diff --git a/deps/npm/lib/utils/error-handler.js b/deps/npm/lib/utils/error-handler.js
new file mode 100644
index 0000000000..af52e61697
--- /dev/null
+++ b/deps/npm/lib/utils/error-handler.js
@@ -0,0 +1,264 @@
+
+module.exports = errorHandler
+
+var cbCalled = false
+ , log = require("./log.js")
+ , npm = require("../npm.js")
+ , rm = require("rimraf")
+ , constants = require("constants")
+ , itWorked = false
+ , path = require("path")
+ , ini = require("./ini.js")
+ , wroteLogFile = false
+
+
+process.on("exit", function (code) {
+ // console.error("exit", code)
+ if (!ini.resolved) return
+ if (code) itWorked = false
+ if (itWorked) log("ok")
+ else {
+ if (!cbCalled) {
+ log.error("cb() never called!\n ")
+ }
+ if (wroteLogFile) {
+ log.error([""
+ ,"Additional logging details can be found in:"
+ ," " + path.resolve("npm-debug.log")
+ ].join("\n"))
+ wroteLogFile = false
+ }
+ log.win("not ok")
+ }
+ itWorked = false // ready for next exit
+})
+
+function errorHandler (er) {
+ // console.error("errorHandler", er)
+ if (!ini.resolved) {
+ // logging won't work unless we pretend that it's ready
+ er = er || new Error("Exit prior to config file resolving.")
+ console.error(er.stack || er.message)
+ }
+
+ if (cbCalled) {
+ er = er || new Error("Callback called more than once.")
+ }
+
+ cbCalled = true
+ if (!er) return exit(0)
+ if (!(er instanceof Error)) {
+ log.error(er)
+ return exit(1, true)
+ }
+
+ var m = er.code || er.message.match(/^(?:Error: )?(E[A-Z]+)/)
+ if (m) {
+ m = m[1]
+ if (!constants[m] && !npm[m]) constants[m] = {}
+ er.errno = npm[m] || constants[m]
+ }
+
+ switch (er.errno) {
+ case "ECONNREFUSED":
+ case constants.ECONNREFUSED:
+ log.error(er)
+ log.error(["If you are using Cygwin, please set up your /etc/resolv.conf"
+ ,"See step 4 in this wiki page:"
+ ," http://github.com/ry/node/wiki/Building-node.js-on-Cygwin-%28Windows%29"
+ ,"If you are not using Cygwin, please report this"
+ ,"at <http://github.com/isaacs/npm/issues>"
+ ,"or email it to <npm-@googlegroups.com>"
+ ].join("\n"))
+ break
+
+ case "EACCES":
+ case "EPERM":
+ case constants.EACCES:
+ case constants.EPERM:
+ log.error(er)
+ log.error(["",
+ "Please use 'sudo' or log in as root to run this command."
+ ,""
+ ," sudo npm "
+ +npm.config.get("argv").original.map(JSON.stringify).join(" ")
+ ,""
+ ,"or set the 'unsafe-perm' config var to true."
+ ,""
+ ," npm config set unsafe-perm true"
+ ].join("\n"))
+ break
+
+ case npm.ELIFECYCLE:
+ er.code = "ELIFECYCLE"
+ log.error(er.message)
+ log.error(["","Failed at the "+er.pkgid+" "+er.stage+" script."
+ ,"This is most likely a problem with the "+er.pkgname+" package,"
+ ,"not with npm itself."
+ ,"Tell the author that this fails on your system:"
+ ," "+er.script
+ ,"You can get their info via:"
+ ," npm owner ls "+er.pkgname
+ ,"There is likely additional logging output above."
+ ].join("\n"))
+ break
+
+ case npm.EJSONPARSE:
+ er.code = "EJSONPARSE"
+ log.error(er.message)
+ log.error("File: "+er.file)
+ log.error(["Failed to parse package.json data."
+ ,"package.json must be actual JSON, not just JavaScript."
+ ,"","This is not a bug in npm."
+ ,"Tell the package author to fix their package.json file."
+ ].join("\n"), "JSON.parse")
+ break
+
+ case npm.E404:
+ er.code = "E404"
+ if (er.pkgid && er.pkgid !== "-") {
+ var msg = ["'"+er.pkgid+"' is not in the npm registry."
+ ,"You could maybe bug the author to publish it"]
+ if (er.pkgid.match(/^node[\.\-]|[\.\-]js$/)) {
+ var s = er.pkgid.replace(/^node[\.\-]|[\.\-]js$/g, "")
+ if (s !== er.pkgid) {
+ s = s.replace(/[^a-z0-9]/g, ' ')
+ msg.push("Maybe try 'npm search " + s + "'")
+ }
+ }
+ msg.push("Note that you can also install from a tarball or folder.")
+ log.error(msg.join("\n"), "404")
+ }
+ break
+
+ case npm.EPUBLISHCONFLICT:
+ er.code = "EPUBLISHCONFLICT"
+ log.error(["Cannot publish over existing version."
+ ,"Bump the 'version' field, set the --force flag, or"
+ ," npm unpublish '"+er.pkgid+"'"
+ ,"and try again"
+ ].join("\n"), "publish fail" )
+ break
+
+ case npm.EISGIT:
+ er.code = "EISGIT"
+ log.error([er.message
+ ," "+er.path
+ ,"Refusing to remove it. Update manually,"
+ ,"or move it out of the way first."
+ ].join("\n"), "git" )
+ break
+
+ case npm.ECYCLE:
+ er.code = "ECYCLE"
+ log.error([er.message
+ ,"While installing: "+er.pkgid
+ ,"Found a pathological dependency case that npm cannot solve."
+ ,"Please report this to the package author."
+ ].join("\n"))
+ break
+
+ case npm.ENOTSUP:
+ er.code = "ENOTSUP"
+ log.error([er.message
+ ,"Not compatible with your version of node/npm: "+er.pkgid
+ ,"Required: "+JSON.stringify(er.required)
+ ,"Actual: "
+ +JSON.stringify({npm:npm.version
+ ,node:npm.config.get("node-version")})
+ ].join("\n"))
+ break
+
+ case "EEXIST":
+ case constants.EEXIST:
+ log.error([er.message
+ ,"File exists: "+er.path
+ ,"Move it away, and try again."].join("\n"))
+ break
+
+ default:
+ log.error(er)
+ log.error(["Report this *entire* log at:"
+ ," <http://github.com/isaacs/npm/issues>"
+ ,"or email it to:"
+ ," <npm-@googlegroups.com>"
+ ].join("\n"))
+ break
+ }
+
+ var os = require("os")
+ log.error("")
+ log.error(os.type() + " " + os.release(), "System")
+ log.error(process.argv
+ .map(JSON.stringify).join(" "), "command")
+ log.error(process.cwd(), "cwd")
+ log.error(process.version, "node -v")
+ log.error(npm.version, "npm -v")
+
+ ; [ "file"
+ , "path"
+ , "type"
+ , "syscall"
+ , "fstream_path"
+ , "fstream_unc_path"
+ , "fstream_type"
+ , "fstream_class"
+ , "fstream_finish_call"
+ , "fstream_linkpath"
+ , "arguments"
+ , "code"
+ , "message"
+ ].forEach(function (k) {
+ if (er[k]) log.error(er[k], k)
+ })
+
+ if (er.fstream_stack) {
+ log.error(er.fstream_stack.join("\n"), "fstream_stack")
+ }
+
+ if (er.errno && typeof er.errno !== "object") log.error(er.errno, "errno")
+ exit(typeof er.errno === "number" ? er.errno : 1)
+}
+
+function exit (code, noLog) {
+ var doExit = npm.config.get("_exit")
+ log.verbose([code, doExit], "exit")
+ if (log.level === log.LEVEL.silent) noLog = true
+
+ if (code && !noLog) writeLogFile(reallyExit)
+ else rm("npm-debug.log", function () { rm(npm.tmp, reallyExit) })
+
+ function reallyExit() {
+ itWorked = !code
+ //if (!itWorked) {
+ if (!doExit) process.emit("exit", code)
+ else process.exit(code)
+ //}
+ }
+}
+
+var writingLogFile = false
+function writeLogFile (cb) {
+ if (writingLogFile) return cb()
+ writingLogFile = true
+ wroteLogFile = true
+
+ var fs = require("graceful-fs")
+ , fstr = fs.createWriteStream("npm-debug.log")
+ , util = require("util")
+
+ log.history.forEach(function (m) {
+ var lvl = log.LEVEL[m.level]
+ , pref = m.pref ? " " + m.pref : ""
+ , b = lvl + pref + " "
+ , eol = process.platform === "win32" ? "\r\n" : "\n"
+ , msg = typeof m.msg === "string" ? m.msg
+ : msg instanceof Error ? msg.stack || msg.message
+ : util.inspect(m.msg, 0, 4)
+ fstr.write(new Buffer(b
+ +(msg.split(/\r?\n+/).join(eol+b))
+ + eol))
+ })
+ fstr.end()
+ fstr.on("close", cb)
+}
diff --git a/deps/npm/lib/utils/excludes.js b/deps/npm/lib/utils/excludes.js
new file mode 100644
index 0000000000..6df89ccaf4
--- /dev/null
+++ b/deps/npm/lib/utils/excludes.js
@@ -0,0 +1,145 @@
+// build up a set of exclude lists in order of precedence:
+// [ ["!foo", "bar"]
+// , ["foo", "!bar"] ]
+// being *included* will override a previous exclusion,
+// and being excluded will override a previous inclusion.
+//
+// Each time the tar file-list generator thingie enters a new directory,
+// it calls "addIgnoreFile(dir, list, cb)". If an ignore file is found,
+// then it is added to the list and the cb() is called with a
+// child of the original list, so that we don't have
+// to worry about popping it off at the right time, since other
+// directories will continue to use the original parent list.
+//
+// If no ignore file is found, then the original list is returned.
+//
+// To start off with, ~/.{npm,git}ignore is added, as is
+// prefix/{npm,git}ignore, effectively treated as if they were in the
+// base package directory.
+
+exports.addIgnoreFile = addIgnoreFile
+exports.readIgnoreFile = readIgnoreFile
+exports.parseIgnoreFile = parseIgnoreFile
+exports.test = test
+exports.filter = filter
+
+var path = require("path")
+ , fs = require("graceful-fs")
+ , minimatch = require("minimatch")
+ , relativize = require("./relativize.js")
+ , log = require("./log.js")
+
+// todo: memoize
+
+// read an ignore file, or fall back to the
+// "gitBase" file in the same directory.
+function readIgnoreFile (file, gitBase, cb) {
+ //log.warn(file, "ignoreFile")
+ if (!file) return cb(null, "")
+ fs.readFile(file, function (er, data) {
+ if (!er || !gitBase) return cb(null, data || "")
+ var gitFile = path.resolve(path.dirname(file), gitBase)
+ fs.readFile(gitFile, function (er, data) {
+ return cb(null, data || "")
+ })
+ })
+}
+
+// read a file, and then return the list of patterns
+function parseIgnoreFile (file, gitBase, dir, cb) {
+ readIgnoreFile(file, gitBase, function (er, data) {
+ data = data ? data.toString("utf8") : ""
+
+ data = data.split(/[\r\n]+/).map(function (p) {
+ return p.trim()
+ }).filter(function (p) {
+ return p.length && p.charAt(0) !== "#"
+ })
+ data.dir = dir
+ return cb(er, data)
+ })
+}
+
+// add an ignore file to an existing list which can
+// then be passed to the test() function. If the ignore
+// file doesn't exist, then the list is unmodified. If
+// it is, then a concat-child of the original is returned,
+// so that this is suitable for walking a directory tree.
+function addIgnoreFile (file, gitBase, list, dir, cb) {
+ if (typeof cb !== "function") cb = dir, dir = path.dirname(file)
+ if (typeof cb !== "function") cb = list, list = []
+ parseIgnoreFile(file, gitBase, dir, function (er, data) {
+ if (!er && data) {
+ // package.json "files" array trumps everything
+ // Make sure it's always last.
+ if (list.length && list[list.length-1].packageFiles) {
+ list = list.concat([data, list.pop()])
+ } else {
+ list = list.concat([data])
+ }
+ }
+ cb(er, list)
+ })
+}
+
+
+// no IO
+// loop through the lists created in the functions above, and test to
+// see if a file should be included or not, given those exclude lists.
+function test (file, excludeList) {
+ if (path.basename(file) === "package.json") return true
+ //log.warn(file, "test file")
+ //log.warn(excludeList, "test list")
+ var incRe = /^\!(\!\!)*/
+ , excluded = false
+ for (var i = 0, l = excludeList.length; i < l; i ++) {
+ var excludes = excludeList[i]
+ , dir = excludes.dir
+
+ // chop the filename down to be relative to excludeDir
+ var rf = relativize(file, dir, true)
+ rf = rf.replace(/^\.\//, "")
+
+ for (var ii = 0, ll = excludes.length; ii < ll; ii ++) {
+ //log.warn(JSON.stringify(excludes[ii]), "ex")
+ var ex = excludes[ii].replace(/^(!*)\.\//, "$1")
+ , inc = !!ex.match(incRe)
+
+ // excluding/including a dir excludes/includes all the files in it.
+ if (ex.slice(-1) === "/") ex += "**"
+
+ // if this is not an inclusion attempt, and someone else
+ // excluded it, then just continue, because there's nothing
+ // that can be done here to change the exclusion.
+ if (!inc && excluded) continue
+
+ // if it's an inclusion attempt, and the file has not been
+ // excluded, then skip it, because there's no need to try again.
+ if (inc && !excluded) continue
+
+ // if it matches the pattern, then it should be excluded.
+ excluded = !!minimatch(rf, ex, { baseMatch: true })
+ //if (inc) excluded = !excluded
+
+ //if (excluded) {
+ // console.error("excluded %s %s", rf, ex)
+ //}
+
+ // if you include foo, then it also includes foo/bar.js
+ if (inc && excluded && ex.slice(-3) !== "/**") {
+ excluded = minimatch(rf, ex + "/**", { baseMatch: true })
+ // console.error(rf, ex + "/**", inc, excluded)
+ }
+ }
+ //log.warn([rf, excluded, excludes], "file, excluded, excludes")
+ }
+ // true if it *should* be included
+ // log.warn([file, excludeList, excluded], "file, excluded")
+ return !excluded
+}
+
+// returns a function suitable for Array#filter
+function filter (dir, list) { return function (file) {
+ file = file.trim()
+ return file && test(path.resolve(dir, file), list)
+}}
diff --git a/deps/npm/lib/utils/exec.js b/deps/npm/lib/utils/exec.js
new file mode 100644
index 0000000000..0fa0371b2e
--- /dev/null
+++ b/deps/npm/lib/utils/exec.js
@@ -0,0 +1,106 @@
+
+module.exports = exec
+exec.spawn = spawn
+exec.pipe = pipe
+
+var log = require("./log.js")
+ , child_process = require("child_process")
+ , util = require("util")
+ , npm = require("../npm.js")
+ , myUID = process.getuid ? process.getuid() : null
+ , myGID = process.getgid ? process.getgid() : null
+ , isRoot = process.getuid && myUID === 0
+ , constants = require("constants")
+
// Run an external command, buffering stdout/stderr, and call
// cb(er, exitCode, stdout, stderr) when it exits.
// Signature: exec(cmd, args [, env [, takeOver [, cwd [, uid [, gid]]]]], cb)
+function exec (cmd, args, env, takeOver, cwd, uid, gid, cb) {
  // Optional-argument shuffle, right to left: whichever slot actually
  // holds the callback wins, and every later parameter gets a default.
+  if (typeof cb !== "function") cb = gid, gid = null
+  if (typeof cb !== "function") cb = uid, uid = null
+  if (typeof cb !== "function") cb = cwd, cwd = null
+  if (typeof cb !== "function") cb = takeOver, takeOver = true
+  if (typeof cb !== "function") cb = env, env = process.env
+  gid = gid == null ? myGID : gid
+  uid = uid == null ? myUID : uid
  // Only root may actually switch uid/gid.  Non-root either forces the
  // current ids (unsafe-perm) or fails with an EPERM-style error.
+  if (!isRoot) {
+    if (npm.config.get("unsafe-perm")) {
+      uid = myUID
+      gid = myGID
+    } else if (uid !== myUID || gid !== myGID) {
+      var e = new Error("EPERM: setuid() operation not permitted")
+      e.errno = constants.EPERM
+      return cb(e)
+    }
+  }
+  if (uid !== myUID) {
+    log.verbose(uid, "Setting uid from "+myUID)
+    log.verbose(new Error().stack, "stack at uid setting")
+  }
+  log.silly(cmd+" "+args.map(JSON.stringify).join(" "), "exec")
+  var stdout = ""
+    , stderr = ""
+    , cp = spawn(cmd, args, env, takeOver, cwd, uid, gid)
  // When takeOver is set, spawn() hands the parent's stdio fds to the
  // child, so these streams may not exist -- hence the guards.
+  cp.stdout && cp.stdout.on("data", function (chunk) {
+    if (chunk) stdout += chunk
+  })
+  cp.stderr && cp.stderr.on("data", function (chunk) {
+    if (chunk) stderr += chunk
+  })
+  cp.on("exit", function (code) {
+    var er = null
    // A non-zero exit code is surfaced as an error naming the command.
+    if (code) er = new Error("`"+cmd
+                  +(args.length ? " "
+                    + args.map(JSON.stringify).join(" ")
+                    : "")
+                  +"` failed with "+code)
+    cb(er, code, stdout, stderr)
+  })
+  return cp
+}
+
// Stream child stderr straight through to our stderr (silly loglevel).
+function logger (d) { if (d) process.stderr.write(d+"") }
// Pipe cp1's stdout into cp2's stdin and call cb once when the pair is
// done: cp1 failing kills cp2 and reports cp1's error; otherwise cp2's
// exit decides.  errState guards against calling cb twice.
+function pipe (cp1, cp2, cb) {
+  util.pump(cp1.stdout, cp2.stdin)
+  var errState = null
+    , buff1 = ""
+    , buff2 = ""
  // At silly loglevel stream child stderr live; otherwise buffer it so
  // it is only shown on failure.
+  if (log.level <= log.LEVEL.silly) {
+    cp1.stderr.on("data", logger)
+    cp2.stderr.on("data", logger)
+  } else {
+    cp1.stderr.on("data", function (d) { buff1 += d })
+    cp2.stderr.on("data", function (d) { buff2 += d })
+  }
+
+  cp1.on("exit", function (code) {
+    if (!code) return log.verbose(cp1.name || "<unknown>", "success")
    // Upstream failed: make sure the downstream process dies too.
+    if (!cp2._exited) cp2.kill()
+    log.error(buff1, cp1.name || "<unknown>")
+    cb(errState = new Error(
+      "Failed "+(cp1.name || "<unknown>")+"\nexited with "+code))
+  })
+  cp2.on("exit", function (code) {
+    cp2._exited = true
    // cp1 already reported a failure; don't call cb a second time.
+    if (errState) return
+    if (!code) return log.verbose(cp2.name || "<unknown>", "success", cb)
+    log.error(buff2, cp2.name || "<unknown>")
+    cb(new Error( "Failed "
+                + (cp2.name || "<unknown>")
+                + "\nexited with " + code ))
+  })
+}
+
// Thin wrapper over child_process.spawn.  takeOver hands the parent's
// stdio fds [0,1,2] to the child via customFds; [-1,-1,-1] asks for
// new pipes instead.  uid/gid are passed through, coerced to numbers.
+function spawn (c, a, env, takeOver, cwd, uid, gid) {
+  var fds = [ 0, 1, 2 ]
+    , opts = { customFds : takeOver ? fds : [-1,-1,-1]
+             , env : env || process.env
+             , cwd : cwd || null }
+    , cp
+  if (uid != null) opts.uid = uid
+  if (gid != null) opts.gid = gid
+  if (!isNaN(opts.uid)) opts.uid = +opts.uid
+  if (!isNaN(opts.gid)) opts.gid = +opts.gid
  // Remember the full command line for error reporting in exec()/pipe().
+  var name = c +" "+ a.map(JSON.stringify).join(" ")
+  log.silly([c, a, opts.cwd], "spawning")
+  cp = child_process.spawn(c, a, opts)
+  cp.name = name
+  return cp
+}
diff --git a/deps/npm/lib/utils/fetch.js b/deps/npm/lib/utils/fetch.js
new file mode 100644
index 0000000000..726be8848b
--- /dev/null
+++ b/deps/npm/lib/utils/fetch.js
@@ -0,0 +1,62 @@
+/**
+ * Fetch an HTTP url to a local file.
+ **/
+
+var request = require("request")
+ , fs = require("graceful-fs")
+ , npm = require("../npm.js")
+ , url = require("url")
+ , log = require("./log.js")
+ , path = require("path")
+ , mkdir = require("./mkdir-p.js")
+ , regHost
+ , getAgent = require("./get-agent.js")
+
+module.exports = fetch
+
// Download `remote` (an http/https url) to the file at `local`,
// creating the parent directory first.  cb(er) on completion.
+function fetch (remote, local, headers, cb) {
+  if (typeof cb !== "function") cb = headers, headers = {}
+  log.info(remote, "fetch")
+  log.verbose(local, "fetch to")
+  mkdir(path.dirname(local), function (er) {
+    if (er) return cb(er)
+    fetch_(remote, local, headers, cb)
+  })
+}
+
// Stream the response body into a write stream on the local file.
// The _ERROR flag on the stream guards against calling cb twice when
// both "error" and "close" fire.
+function fetch_ (remote, local, headers, cb) {
+  var fstr = fs.createWriteStream(local, { mode : npm.modes.file })
+  fstr.on("error", function (er) {
+    fs.close(fstr.fd, function () {})
+    if (fstr._ERROR) return
+    cb(fstr._ERROR = er)
+  })
+  fstr.on("open", function () {
+    makeRequest(remote, fstr, headers)
+  })
+  fstr.on("close", function () {
+    if (fstr._ERROR) return
+    cb()
+  })
+}
+
// Issue the actual http request and pipe the response into fstr.
// Failures are reported by emitting "error" on fstr.
// NOTE(review): `headers` is accepted but never applied to the
// request options below -- confirm whether that is intentional.
+function makeRequest (remote, fstr, headers) {
+  remote = url.parse(remote)
  // Cache the registry host in the module-level `regHost` on first use.
+  regHost = regHost || url.parse(npm.config.get("registry")).host
+
  // For registry requests with always-auth, decode the base64 _auth
  // config back into "user:pass" for the request url.
+  if (remote.host === regHost && npm.config.get("always-auth")) {
+    remote.auth = new Buffer( npm.config.get("_auth")
+                            , "base64" ).toString("utf8")
+    if (!remote.auth) return fstr.emit("error", new Error(
+      "Auth required and none provided. Please run 'npm adduser'"))
+  }
+
  // Pick the protocol-appropriate proxy setting, if any.
+  var proxy = npm.config.get( remote.protocol === "https:"
+                            ? "https-proxy"
+                            : "proxy")
+
+  request({ url: remote
+          , proxy: proxy
+          , agent: getAgent(remote)
+          , strictSSL: npm.config.get("strict-ssl") }).pipe(fstr)
+}
diff --git a/deps/npm/lib/utils/find-prefix.js b/deps/npm/lib/utils/find-prefix.js
new file mode 100644
index 0000000000..320456c7ea
--- /dev/null
+++ b/deps/npm/lib/utils/find-prefix.js
@@ -0,0 +1,50 @@
+// try to find the most reasonable prefix to use
+
+module.exports = findPrefix
+
+var fs = require("graceful-fs")
+ , path = require("path")
+ , npm = require("../npm.js")
+
// Resolve the most reasonable npm prefix for path `p`:
// global mode uses `p` itself; a path inside node_modules walks up
// out of it; otherwise climb until a dir containing node_modules or
// package.json is found, falling back to the original path.
// cb(er, prefix) is always deferred to the next tick.
+function findPrefix (p, cb_) {
  // Defer the callback so callers always see async behavior.
+  function cb (er, p) {
+    process.nextTick(function () {
+      cb_(er, p)
+    })
+  }
+
+  p = path.resolve(p)
+  if (npm.config.get("global")) return cb(null, p)
+  // if there's no node_modules folder, then
+  // walk up until we hopefully find one.
+  // if none anywhere, then use cwd.
+  var walkedUp = false
+  while (path.basename(p) === "node_modules") {
+    p = path.dirname(p)
+    walkedUp = true
+  }
+  if (walkedUp) return cb(null, p)
+
+  findPrefix_(p, p, cb)
+}
+
// Recursive helper: climb toward the filesystem root looking for a
// directory containing node_modules or package.json.
+function findPrefix_ (p, original, cb) {
  // Stop at the root ("/" on unix, a drive root on windows).
+  if (p === "/"
+      || (process.platform === "win32" && p.match(/^[a-zA-Z]:(\\|\/)?$/))) {
+    return cb(null, original)
+  }
+  fs.readdir(p, function (er, files) {
+    // an error right away is a bad sign.
+    if (er && p === original) return cb(er)
+
+    // walked up too high or something.
+    if (er) return cb(null, original)
+
+    if (files.indexOf("node_modules") !== -1
+        || files.indexOf("package.json") !== -1) {
+      return cb(null, p)
+    }
+
+    return findPrefix_(path.dirname(p), original, cb)
+  })
+}
diff --git a/deps/npm/lib/utils/find.js b/deps/npm/lib/utils/find.js
new file mode 100644
index 0000000000..89d1c2bc79
--- /dev/null
+++ b/deps/npm/lib/utils/find.js
@@ -0,0 +1,53 @@
+
+// walks a set of directories recursively, and returns
+// the list of files that match the filter, if one is
+// provided.
+
+module.exports = find
+var fs = require("graceful-fs")
+ , asyncMap = require("slide").asyncMap
+ , path = require("path")
+
// find(dir(s) [, filter [, depth]], cb): walk directories recursively
// and collect matching entries.  `filter` may be a RegExp, a string
// prefix, or a function (f, "file"|"dir") -> truthy-to-keep.
// cb(er, list); directories are listed with a trailing "/".
+function find (dir, filter, depth, cb) {
+  if (typeof cb !== "function") cb = depth, depth = Infinity
+  if (typeof cb !== "function") cb = filter, filter = null
+  if (filter instanceof RegExp) filter = reFilter(filter)
+  if (typeof filter === "string") filter = strFilter(filter)
+  if (!Array.isArray(dir)) dir = [dir]
+  if (!filter) filter = nullFilter
+  asyncMap(dir, findDir(filter, depth), cb)
+}
// Mapper over one directory: emit the dir itself (if the filter keeps
// it), then recurse into its entries until `depth` runs out.
+function findDir (filter, depth) { return function (dir, cb) {
+  fs.lstat(dir, function (er, stats) {
+    // don't include missing files, but don't abort either
+    if (er) return cb()
+    if (!stats.isDirectory()) return findFile(dir, filter, depth)("", cb)
+    var found = []
+    if (!filter || filter(dir, "dir")) found.push(dir+"/")
+    if (depth <= 0) return cb(null, found)
    // Wrap cb so the dir entry is prepended to whatever its children
    // produce.
+    cb = (function (cb) { return function (er, f) {
+      cb(er, found.concat(f))
+    }})(cb)
+    fs.readdir(dir, function (er, files) {
+      if (er) return cb(er)
+      asyncMap(files, findFile(dir, filter, depth - 1), cb)
+    })
+  })
+}}
// Mapper over one directory entry: recurse into subdirectories, keep
// files the filter accepts.
+function findFile (dir, filter, depth) { return function (f, cb) {
+  f = path.join(dir, f)
+  fs.lstat(f, function (er, s) {
+    // don't include missing files, but don't abort either
+    if (er) return cb(null, [])
+    if (s.isDirectory()) return find(f, filter, depth, cb)
+    if (!filter || filter(f, "file")) cb(null, f)
+    else cb(null, [])
+  })
+}}
// Filter factories used by find(): all of them accept plain files
// only (see nullFilter), then apply their own criterion.
function reFilter (pattern) { return function (f, type) {
  // keep files whose path matches the regexp
  return nullFilter(f, type) && f.match(pattern)
}}
function strFilter (prefix) { return function (f, type) {
  // keep files whose path starts with the given prefix
  return nullFilter(f, type) && f.indexOf(prefix) === 0
}}
// Base filter: non-empty names of plain files; directories are dropped.
function nullFilter (f, type) {
  if (type !== "file") return false
  return f
}
diff --git a/deps/npm/lib/utils/get-agent.js b/deps/npm/lib/utils/get-agent.js
new file mode 100644
index 0000000000..b4a5738d2d
--- /dev/null
+++ b/deps/npm/lib/utils/get-agent.js
@@ -0,0 +1,62 @@
+// get an http/https agent
+// This is necessary for the custom CA certs in http2,
+// especially while juggling multiple different registries.
+//
+// When using http2, the agent key is just the CA setting,
+// since it can manage socket pooling across different host:port
+// options. When using the older implementation, the
+// key is ca:host:port combination.
+
+module.exports = getAgent
+
+var npm = require("../npm.js")
+ , url = require("url")
+ , agents = {}
+ , isHttp2 = !!require("http").globalAgent
+ , registry = url.parse(npm.config.get("registry"))
+ , regCA = npm.config.get("ca")
+
+function getAgent (remote) {
+ // If not doing https, then there's no CA cert to manage.
+ // on http2, this will use the default global agent.
+ // on http1, this is undefined, so it'll spawn based on
+ // host:port if necessary.
+ if (remote.protocol !== "https:") {
+ return require("http").globalAgent
+ }
+
+ if (typeof remote === "string") {
+ remote = url.parse(remote)
+ }
+
+ var ca
+ // if this is the registry, then use the configuration ca.
+ // otherwise, just use the built-in CAs that node has.
+ // todo: multi-registry support.
+ if (remote.hostname === registry.hostname
+ && remote.port === registry.port) {
+ ca = regCA
+ }
+
+ // no CA, just use the default agent.
+ if (!ca) {
+ return require("https").globalAgent
+ }
+
+ var hostname = remote.hostname
+ , port = remote.port
+ , key = agentKey(hostname, port, ca)
+
+ return agents[key] = agents[key] || getAgent_(hostname, port, ca)
+}
+
+function getAgent_ (hostname, port, ca) {
+ var Agent = require("https").Agent
+ return new Agent({ host: hostname
+ , port: port
+ , ca: ca })
+}
+
+function agentKey (hostname, port, ca) {
+ return JSON.stringify(isHttp2 ? ca : [hostname, port, ca])
+}
diff --git a/deps/npm/lib/utils/get.js b/deps/npm/lib/utils/get.js
new file mode 100644
index 0000000000..3c5e4f099b
--- /dev/null
+++ b/deps/npm/lib/utils/get.js
@@ -0,0 +1,6 @@
+
+module.exports = get
// Case-insensitive property lookup: return the value of the first key
// in `obj` whose lowercased name equals `key` lowercased, else undefined.
function get (obj, key) {
  var want = key.toLowerCase()
  for (var k in obj) {
    if (k.toLowerCase() === want) return obj[k]
  }
  return undefined
}
diff --git a/deps/npm/lib/utils/ini.js b/deps/npm/lib/utils/ini.js
new file mode 100644
index 0000000000..f31bdeb790
--- /dev/null
+++ b/deps/npm/lib/utils/ini.js
@@ -0,0 +1,333 @@
+// Create a chain of config objects, in this priority order:
+//
+// CLI - the --foo things in the command line.
+// ENV - all the things starting with npm_config_ in the environment
+// USER - $HOME/.npmrc
+// GLOBAL - $PREFIX/etc/npmrc
+//
+// If the CLI or ENV specify a userconfig, then that file is used
+// as the USER config.
+//
+// If the CLI or ENV specify a globalconfig, then that file is used
+// as the GLOBAL config.
+//
+// export npm_config_userconfig=/some/other/file
+// export npm_config_globalconfig=global
+//
+// For implementation reasons, "_" in env vars is turned into "-". So,
+// export npm_config_node_version
+
+exports.resolveConfigs = resolveConfigs
+exports.save = save
+exports.saveConfig = saveConfig
+exports.del = del
+exports.get = get
+exports.set = set
+exports.unParseField = unParseField
+exports.defaultConfig = null
+
+Object.defineProperty(exports, "keys",
+ { get : function () { return configList.keys }})
+
+var fs = require("graceful-fs")
+ , path = require("path")
+ , nopt = require("nopt")
+ , ini = require("ini")
+ , ProtoList = require("proto-list")
+
+ , log = require("./log.js")
+ , configDefs = require("./config-defs.js")
+
+ , myUid = process.env.SUDO_UID !== undefined
+ ? process.env.SUDO_UID : (process.getuid && process.getuid())
+ , myGid = process.env.SUDO_GID !== undefined
+ ? process.env.SUDO_GID : (process.getgid && process.getgid())
+ , eol = process.platform === "win32" ? "\r\n" : "\n"
+ , privateKey = null
+ , defaultConfig
+ , configList = new ProtoList()
+ , types = configDefs.types
+ , TRANS = exports.TRANS =
+ { "default" : 5
+ , "builtin": 4
+ , "global" : 3
+ , "user" : 2
+ , "env" : 1
+ , "cli" : 0
+ }
+
+exports.configList = configList
+
+// just put this here for a moment, so that the logs
+// in the config-loading phase don't cause it to blow up.
+configList.push({loglevel:"warn"})
+
// Build the config protolist in priority order:
// cli > env > user file > global file > builtin file > defaults.
// cb_(er) once every level is loaded and validated.
+function resolveConfigs (cli, cb_) {
+  defaultConfig = defaultConfig || configDefs.defaults
+  exports.defaultConfig = defaultConfig
  // Replace the {loglevel:"warn"} placeholder pushed at module load.
+  configList.pop()
+  configList.push(defaultConfig)
+  var cl = configList
+    , dc = cl.pop()
+  if (!cb_) cb_ = cli, cli = {}
+
+  function cb (er) {
+    //console.error("resolving configs")
+    exports.resolved = true
+    cb_(er)
+  }
+
+  cl.list.length = 0
  // Normalize cli values through the same field parser as files/env.
+  Object.keys(cli).forEach(function (k) {
+    cli[k] = parseField(cli[k], k)
+  })
+  cl.push(cli)
+  cl.push(parseEnv(process.env))
  // userconfig/globalconfig locations may themselves be configured via
  // cli or env, so resolve them through the list built so far.
+  parseFile(cl.get("userconfig") || dc.userconfig, function (er, conf) {
+    if (er) return cb(er)
+    cl.push(conf)
+    parseFile( cl.get("globalconfig") || dc.globalconfig
+             , function (er, conf) {
+      if (er) return cb(er)
+      cl.push(conf)
+      // the builtin config file, for distros to use.
+      parseFile(path.resolve(__dirname, "../../npmrc"), function (er, conf) {
+        if (er) conf = {}
+        cl.push(conf)
+        cl.push(dc)
+        setUser(cl, dc, thenValidate(cl, cb))
+      })
+    })
+  })
+}
+
// After loading, nopt-clean each level in place so invalid values are
// warned about and dropped.
+function thenValidate (cl, cb) { return function (er) {
+  if (er) return cb(er)
+
+  // warn about invalid configs at every level.
+  cl.list.forEach(function (conf, level) {
+    // clean(data, types, typeDefs)
+    nopt.clean(conf, configDefs.types)
+  })
+
+  cb()
+}}
+
// Default the "user" config to $SUDO_UID or the owner of the prefix
// dir, unless running in global mode.
+function setUser (cl, dc, cb) {
+  // If global, leave it as-is.
+  // If not global, then set the user to the owner of the prefix folder.
+  // Just set the default, so it can be overridden.
+  //console.error("setUser "+cl.get("global")+" "+ cb.toString())
+  if (cl.get("global")) return cb()
+  if (process.env.SUDO_UID) {
+    //console.error("uid="+process.env.SUDO_UID)
+    dc.user = +(process.env.SUDO_UID)
+    return cb()
+  }
+  //console.error("prefix="+cl.get("prefix"))
+  fs.stat(path.resolve(cl.get("prefix")), function (er, st) {
+    if (er) {
+      return log.er(cb, "prefix directory not found")(er)
+    }
+    dc.user = st.uid
+    return cb()
+  })
+}
+
// Collect npm_config_* environment variables into a config object.
// "_" in names becomes "-" (see the note at the top of this file).
+function parseEnv (env) {
+  var conf = {}
+  Object.keys(env)
+    .filter(function (k) { return k.match(/^npm_config_[^_]/i) })
+    .forEach(function (k) {
+      conf[k.replace(/^npm_config_/i, "")
+            .toLowerCase()
+            .replace(/_/g, "-")] = parseField(env[k], k)
+    })
+  return conf
+}
+
// Invert parseField's path handling before a value is written back to
// an ini file: paths under $HOME are re-abbreviated as "~/...", then
// the value is ini-escaped.
function unParseField (f, k) {
  // type can be an array or single thing.
  var isPath = -1 !== [].concat(types[k]).indexOf(path)
  if (isPath) {
    if (typeof process.env.HOME !== 'undefined') {
      // Trim a single trailing slash so the prefix match below works.
      // (The original called process.env.HOME as a function here --
      // `process.env.HOME(0, ...)` -- which threw a TypeError; use
      // substr to actually slice the string.)
      if (process.env.HOME.substr(-1) === "/") {
        process.env.HOME = process.env.HOME.substr(0, process.env.HOME.length - 1)
      }
      if (f.indexOf(process.env.HOME) === 0) {
        f = "~"+f.substr(process.env.HOME.length)
      }
    }
  }
  return ini.safe(f)
}
+
// Coerce a raw string value from cli/env/ini into its typed form:
// ini-unescape, map "true"/"false"/"null"/"undefined" to real values,
// and resolve path-typed fields (expanding a leading "~/").
// NOTE(review): the `emptyIsFalse` parameter is never used -- confirm
// whether it can be dropped.
+function parseField (f, k, emptyIsFalse) {
+  if (typeof f !== "string" && !(f instanceof String)) return f
+  // type can be an array or single thing.
+  var isPath = -1 !== [].concat(types[k]).indexOf(path)
+    , isBool = -1 !== [].concat(types[k]).indexOf(Boolean)
+    , isString = -1 !== [].concat(types[k]).indexOf(String)
+  f = ini.unsafe((""+f).trim())
  // A bare flag (empty value) on a boolean-only field means true.
+  if (isBool && !isString && f === "") return f = true
+  switch (f) {
+    case "true": return true
+    case "false": return false
+    case "null": return null
+    case "undefined": return undefined
+  }
+  if (isPath) {
+    if (f.substr(0, 2) === "~/" && process.env.HOME) {
+      f = path.resolve(process.env.HOME, f.substr(2))
+    }
+    f = path.resolve(f)
+  }
+  return f
+}
+
// Read and parse one ini config file.  Missing or unreadable files
// are treated as empty rather than as errors; the _auth token is
// expanded into username/_password afterwards.
+function parseFile (file, cb) {
+  if (!file) return cb(null, {})
+  log.verbose(file, "config file")
+  fs.readFile(file, function (er, data) {
+    // treat all errors as just an empty file
+    if (er) return cb(null, {})
+    var d = ini.parse(""+data)
+      , f = {}
+    Object.keys(d).forEach(function (k) {
+      f[k] = parseField(d[k], k)
+    })
+    cb(null, parseAuth(f))
+  })
+}
+
// Collapse username/_password into the base64 _auth token and strip
// the plaintext fields before a config level is written to disk.
// Always "succeeds": cb(null, config).
function encryptAuth (config, cb) {
  var user = config.username
    , pass = config._password
  if (user && pass) {
    config._auth = new Buffer(user + ":" + pass).toString("base64")
  }
  delete config.username
  delete config._password
  return cb(null, config)
}
+
// Expand the base64 _auth token into username/_password fields, then
// re-encode so all three stay consistent.  Values already present on
// the config win over the decoded ones.
function parseAuth (config) {
  if (!config._auth) return config
  var decoded = new Buffer(config._auth, "base64").toString()
    , parts = decoded.split(":")
    , un = parts.shift()
    // passwords may themselves contain ":", so rejoin the rest
    , pw = parts.join(":")
  un = config.username = (config.username || un)
  pw = config._password = (config._password || pw)
  config._auth = new Buffer(un + ":" + pw).toString("base64")
  return config
}
+
// Persist config levels back to disk.  `which` may be a level name, an
// array of level names, or omitted (saves global, user, and builtin).
// cb(er) after all levels are written; only the first error is
// reported, later callbacks are ignored.
function save (which, cb) {
  if (typeof which === "function") cb = which, which = null
  if (!which) which = ["global", "user", "builtin"]
  if (!Array.isArray(which)) which = [which]
  // (the original also declared an unused `failed` accumulator; removed)
  var errState = null
    , done = which.length
  which.forEach(function (c) {
    saveConfig(c, function (er) {
      if (errState) return
      if (er) return cb(errState = er)
      if (-- done === 0) return cb()
    })
  })
}
+
// Save one config level to its file.  `which` is "builtin", "global",
// or anything else (treated as "user"); `file` may override the
// destination path.
+function saveConfig (which, file, cb) {
+  if (typeof file === "function") cb = file, file = null
+  if (!file) {
+    switch (which) {
+      case "builtin":
+        file = path.resolve(__dirname, "../../npmrc")
+        break
+      case "global":
+        file = configList.get("globalconfig")
+        break
+      default:
+        file = configList.get("userconfig")
+        which = "user"
+    }
+  }
+
+  saveConfigfile
+    ( file
+    , configList.list[TRANS[which]]
+    , which
+    , cb )
+}
+
// Serialize a config level to ini text and write it out, or remove the
// file entirely when the level has nothing to say.
+function saveConfigfile (file, config, which, cb) {
+  encryptAuth(config, function () { // ignore errors
+    var data = {}
+    Object.keys(config).forEach(function (k) {
+      data[k] = unParseField(config[k], k)
+    })
+    data = ini.stringify(data)
+    return (data.trim())
+         ? writeConfigfile(file, data, which, cb)
+         : rmConfigfile(file, cb)
+  })
+}
// Write the ini text with platform line endings, tighten permissions
// (0600 for the user file, 0644 otherwise), and restore ownership to
// the invoking user when running under sudo.
// NOTE(review): when SUDO_UID is taken from the environment, myUid is
// a string, so `typeof myUid !== "number"` skips the chown -- confirm
// whether that is intended.
+function writeConfigfile (configfile, data, which, cb) {
+  data = data.split(/\r*\n/).join(eol)
+  fs.writeFile
+    ( configfile, data, "utf8"
+    , function (er) {
+        if (er) log(er, "Failed saving "+configfile, cb)
+        else if (which) {
+          fs.chmod(configfile, which === "user" ? 0600 : 0644, function (e) {
+            if (e || which !== "user" || typeof myUid !== "number") {
+              return cb(e)
+            }
+            fs.chown(configfile, +myUid, +myGid, cb)
+          })
+        }
+        else cb()
+      }
+    )
+}
// Best-effort unlink of a config file; a missing file is not an error.
+function rmConfigfile (configfile, cb) {
+  fs.stat(configfile, function (e) {
+    if (e) return cb()
+    fs.unlink(configfile, function (er) {
+      if (er) log(er, "Couldn't remove "+configfile)
+      cb()
+    })
+  })
+}
// Return one config level (or the whole resolved snapshot) with
// private underscore-prefixed keys (e.g. _auth) stripped out.
// NOTE(review): for a specific level this deletes keys from the live
// level object, not a copy -- confirm that mutation is intended.
+function snapshot (which) {
+  var x = (!which) ? configList.snapshot
+        : configList.list[TRANS[which]] ? configList.list[TRANS[which]]
+        : undefined
+  if (!x) return
+  Object.keys(x).forEach(function (k) { if (k.match(/^_/)) delete x[k] })
+  return x
+}
// get(): whole resolved snapshot; get(key): resolved value through the
// protolist; get(key, which): value at one specific level only.
+function get (key, which) {
+  return (!key) ? snapshot(which)
+       : (!which) ? configList.get(key) // resolved
+       : configList.list[TRANS[which]] ? configList.list[TRANS[which]][key]
+       : undefined
+}
// Delete `key` from one config level, or from every level when
// `which` is not given.
function del (key, which) {
  if (!which) configList.list.forEach(function (l) {
    delete l[key]
  })
  else if (configList.list[TRANS[which]]) {
    // Remove just the key from that level.  The original deleted the
    // entire level object (`delete configList.list[TRANS[which]]`),
    // which would discard every other setting at that level and punch
    // a hole in the protolist.
    delete configList.list[TRANS[which]][key]
  }
}
// Set `key` at one config level (default "cli").  Returns the value,
// or an Error when called before resolveConfigs has built the list.
+function set (key, value, which) {
+  which = which || "cli"
  // Only the single placeholder level exists before loading.
+  if (configList.length === 1) {
+    return new Error("trying to set before loading")
+  }
+  return configList.list[TRANS[which]][key] = value
+}
diff --git a/deps/npm/lib/utils/lifecycle.js b/deps/npm/lib/utils/lifecycle.js
new file mode 100644
index 0000000000..878fc88cf3
--- /dev/null
+++ b/deps/npm/lib/utils/lifecycle.js
@@ -0,0 +1,278 @@
+
+exports = module.exports = lifecycle
+exports.cmd = cmd
+
+var log = require("./log.js")
+ , exec = require("./exec.js")
+ , npm = require("../npm.js")
+ , path = require("path")
+ , readJson = require("./read-json.js")
+ , fs = require("graceful-fs")
+ , chain = require("slide").chain
+ , constants = require("constants")
+ , output = require("./output.js")
+
// Run the lifecycle script for `stage` ("install", "test", ...) of
// pkg, plus any matching hook script, with the npm_* environment set
// up.  cb(er) when done; `failOk` downgrades failures to warnings,
// `unsafe` skips the privilege/location restrictions.
+function lifecycle (pkg, stage, wd, unsafe, failOk, cb) {
+  if (typeof cb !== "function") cb = failOk, failOk = false
+  if (typeof cb !== "function") cb = unsafe, unsafe = false
+  if (typeof cb !== "function") cb = wd, wd = null
+
  // Unwrap registry-style {_data: ...} wrappers.
+  while (pkg && pkg._data) pkg = pkg._data
+  if (!pkg) return cb(new Error("Invalid package data"))
+
+  log(pkg._id, stage)
+  if (!pkg.scripts) pkg.scripts = {}
+
+  validWd(wd || path.resolve(npm.dir, pkg.name), function (er, wd) {
+    if (er) return cb(er)
+
+    unsafe = unsafe || npm.config.get("unsafe-perm")
+
    // Refuse to run scripts outside the package's own install dir
    // unless unsafe-perm allows it.
+    if ((wd.indexOf(npm.dir) !== 0 || path.basename(wd) !== pkg.name)
+        && !unsafe && pkg.scripts[stage]) {
+      log.warn(pkg._id+" "+pkg.scripts[stage], "skipping, cannot run in "+wd)
+      return cb()
+    }
+
+    // set the env variables, then run scripts as a child process.
+    var env = makeEnv(pkg)
+    env.npm_lifecycle_event = stage
+
+    // "nobody" typically doesn't have permission to write to /tmp
+    // even if it's never used, sh freaks out.
+    if (!npm.config.get("unsafe-perm")) env.TMPDIR = wd
+
+    lifecycle_(pkg, stage, wd, env, unsafe, failOk, cb)
+  })
+}
+
// cb(null, true) if the package's install dir is a symlink (i.e. an
// `npm link`ed package); never reports an error.
+function checkForLink (pkg, cb) {
+  var f = path.join(npm.dir, pkg.name)
+  fs.lstat(f, function (er, s) {
+    cb(null, !(er || !s.isSymbolicLink()))
+  })
+}
+
// Assemble PATH with every enclosing node_modules/.bin, wrap cb for
// failOk/--force handling, then run the package script followed by
// the hook script via slide's chain().
+function lifecycle_ (pkg, stage, wd, env, unsafe, failOk, cb) {
+  var PATH = []
+    , p = wd.split("node_modules")
+    , acc = path.resolve(p.shift())
  // One .bin entry per nested node_modules level, innermost first.
+  p.forEach(function (pp) {
+    PATH.unshift(path.join(acc, "node_modules", ".bin"))
+    acc = path.join(acc, "node_modules", pp)
+  })
+  PATH.unshift(path.join(acc, "node_modules", ".bin"))
+  if (env.PATH) PATH.push(env.PATH)
+  env.PATH = PATH.join(process.platform === "win32" ? ";" : ":")
+
+  var packageLifecycle = pkg.scripts && pkg.scripts.hasOwnProperty(stage)
+
+  if (packageLifecycle) {
+    // define this here so it's available to all scripts.
+    env.npm_lifecycle_script = pkg.scripts[stage]
+  }
+
  // failOk: demote script failures to warnings.
+  if (failOk) {
+    cb = (function (cb_) { return function (er) {
+      if (er) log.warn(er.message, "continuing anyway")
+      cb_()
+    }})(cb)
+  }
+
  // --force: log the failure but keep going.
+  if (npm.config.get("force")) {
+    cb = (function (cb_) { return function (er) {
+      if (er) log(er, "forced, continuing")
+      cb_()
+    }})(cb)
+  }
+
+  chain
+    ( [ packageLifecycle && [runPackageLifecycle, pkg, env, wd, unsafe]
+      , [runHookLifecycle, pkg, env, wd, unsafe] ]
+    , cb )
+}
+
// Walk up from `d` to the nearest existing directory; errors only if
// the filesystem root is reached without finding one.
+function validWd (d, cb) {
+  fs.stat(d, function (er, st) {
+    if (er || !st.isDirectory()) {
+      var p = path.dirname(d)
+      if (p === d) {
+        return cb(new Error("Could not find suitable wd"))
+      }
+      return validWd(p, cb)
+    }
+    return cb(null, d)
+  })
+}
+
// Execute the package's own script for the current stage via sh -c
// (cmd /c on windows), dropping to the configured user/group unless
// `unsafe`.
+function runPackageLifecycle (pkg, env, wd, unsafe, cb) {
+  // run package lifecycle scripts in the package root, or the nearest parent.
+  var stage = env.npm_lifecycle_event
+    , user = unsafe ? null : npm.config.get("user")
+    , group = unsafe ? null : npm.config.get("group")
+    , cmd = env.npm_lifecycle_script
+    , sh = "sh"
+    , shFlag = "-c"
+
+  if (process.platform === "win32") {
+    sh = "cmd"
+    shFlag = "/c"
+  }
+
+  log.verbose(unsafe, "unsafe-perm in lifecycle")
+
+  output.write("\n> "+pkg._id+" " + stage+" "+wd+"\n> "+cmd+"\n", function (er) {
+    if (er) return cb(er)
+
+    exec( sh, [shFlag, cmd], env, true, wd
+        , user, group
+        , function (er, code, stdout, stderr) {
+            if (er && !npm.ROLLBACK) {
+              log("Failed to exec "+stage+" script", pkg._id)
              // Decorate the error with enough context (stage, script,
              // package) for the top-level error handler.
+              er.message = pkg._id + " "
+                         + stage + ": `" + env.npm_lifecycle_script+"`\n"
+                         + er.message
+              if (er.errno !== constants.EPERM) {
+                er.errno = npm.ELIFECYCLE
+              }
+              er.pkgid = pkg._id
+              er.stage = stage
+              er.script = env.npm_lifecycle_script
+              er.pkgname = pkg.name
+              return cb(er)
+            } else if (er) {
              // During rollback, script failures are logged but ignored.
+              log.error(er, pkg._id+"."+stage)
+              log.error("failed, but continuing anyway", pkg._id+"."+stage)
+              return cb()
+            }
+            cb(er)
+          })
+  })
+}
+
// Run the node_modules/.hooks/<stage> script, if one exists; a missing
// hook is not an error.
+function runHookLifecycle (pkg, env, wd, unsafe, cb) {
+  // check for a hook script, run if present.
+  var stage = env.npm_lifecycle_event
+    , hook = path.join(npm.dir, ".hooks", stage)
+    , user = unsafe ? null : npm.config.get("user")
+    , group = unsafe ? null : npm.config.get("group")
+    , cmd = hook
+
+  fs.stat(hook, function (er) {
+    if (er) return cb()
+
+    exec( "sh", ["-c", cmd], env, true, wd
+        , user, group
+        , function (er) {
+            if (er) {
+              er.message += "\nFailed to exec "+stage+" hook script"
+              log(er, pkg._id)
+            }
            // Hook failures are swallowed while rolling back.
+            if (npm.ROLLBACK) return cb()
+            cb(er)
+          })
+  })
+}
+
// Build the npm_package_* / npm_config_* environment for lifecycle
// scripts.  Recurses into nested package data with a growing prefix;
// the top-level call (prefix "npm_package_") also folds in config
// values and per-package overrides ("name:key" / "name@ver:key").
+function makeEnv (data, prefix, env) {
+  prefix = prefix || "npm_package_"
+  if (!env) {
    // Top-level call: start from process.env minus stale npm_* vars.
+    env = {}
+    for (var i in process.env) if (!i.match(/^npm_/)) {
+      env[i] = process.env[i]
+    }
+
+    // npat asks for tap output
+    if (npm.config.get("npat")) env.TAP = 1
+
+    // express and others respect the NODE_ENV value.
+    if (npm.config.get("production")) env.NODE_ENV = "production"
+
+  } else if (!data.hasOwnProperty("_lifecycleEnv")) {
    // Stash the env on nested data non-enumerably so it isn't itself
    // exported as an npm_package_* variable.
+    Object.defineProperty(data, "_lifecycleEnv",
+      { value : env
+      , enumerable : false
+      })
+  }
+
  // Export every non-underscore field, sanitizing names to [A-Za-z0-9_]
  // and JSON-quoting multi-line values.
+  for (var i in data) if (i.charAt(0) !== "_") {
+    var envKey = (prefix+i).replace(/[^a-zA-Z0-9_]/g, '_')
+    if (data[i] && typeof(data[i]) === "object") {
+      try {
+        // quick and dirty detection for cyclical structures
+        JSON.stringify(data[i])
+        makeEnv(data[i], envKey+"_", env)
+      } catch (ex) {
+        // usually these are package objects.
+        // just get the path and basic details.
+        var d = data[i]
+        makeEnv( { name: d.name, version: d.version, path:d.path }
+               , envKey+"_", env)
+      }
+    } else {
+      env[envKey] = String(data[i])
+      env[envKey] = -1 !== env[envKey].indexOf("\n")
+                  ? JSON.stringify(env[envKey])
+                  : env[envKey]
+    }
+
+  }
+
  // Nested (recursive) calls stop here; only the top level exports
  // config values to the script environment.
+  if (prefix !== "npm_package_") return env
+
+  prefix = "npm_config_"
+  var pkgConfig = {}
+    , ini = require("./ini.js")
+    , keys = ini.keys
+    , pkgVerConfig = {}
+    , namePref = data.name + ":"
+    , verPref = data.name + "@" + data.version + ":"
+
+  keys.forEach(function (i) {
    // Private (underscore) config is skipped unless it is namespaced
    // to this very package.
+    if (i.charAt(0) === "_" && i.indexOf("_"+namePref) !== 0) {
+      return
+    }
+    var value = String(ini.get(i))
+    value = -1 !== value.indexOf("\n")
+          ? JSON.stringify(value)
+          : value
+    i = i.replace(/^_+/, "")
+    if (i.indexOf(namePref) === 0) {
+      var k = i.substr(namePref.length).replace(/[^a-zA-Z0-9_]/g, "_")
+      pkgConfig[ k ] = value
+    } else if (i.indexOf(verPref) === 0) {
+      var k = i.substr(verPref.length).replace(/[^a-zA-Z0-9_]/g, "_")
+      pkgVerConfig[ k ] = value
+    }
+    var envKey = (prefix+i).replace(/[^a-zA-Z0-9_]/g, "_")
+    env[envKey] = value
+  })
+
  // Per-package (and per-version) overrides become
  // npm_package_config_* variables; version-specific ones win.
+  prefix = "npm_package_config_"
+  ;[pkgConfig, pkgVerConfig].forEach(function (conf) {
+    for (var i in conf) {
+      var envKey = (prefix+i)
+      env[envKey] = conf[i]
+    }
+  })
+
+  return env
+}
+
// Build a top-level npm command (e.g. npm.commands.test) that runs the
// given lifecycle stage via run-script for each named package, or for
// the current package when no args are given.
+function cmd (stage) {
+  function CMD (args, cb) {
+    if (args.length) {
+      chain(args.map(function (p) {
+        return [npm.commands, "run-script", [p, stage]]
+      }), cb)
+    } else npm.commands["run-script"]([stage], cb)
+  }
+  CMD.usage = "npm "+stage+" <name>"
+  var installedShallow = require("./completion/installed-shallow.js")
+  CMD.completion = function (opts, cb) {
    // Tab-completion: offer only installed packages defining this
    // stage's script.
+    installedShallow(opts, function (d) {
+      return d.scripts && d.scripts[stage]
+    }, cb)
+  }
+  return CMD
+}
diff --git a/deps/npm/lib/utils/link.js b/deps/npm/lib/utils/link.js
new file mode 100644
index 0000000000..9be1221f0a
--- /dev/null
+++ b/deps/npm/lib/utils/link.js
@@ -0,0 +1,30 @@
+
+module.exports = link
+link.ifExists = linkIfExists
+
+var fs = require("graceful-fs")
+ , chain = require("slide").chain
+ , mkdir = require("./mkdir-p.js")
+ , rm = require("rimraf")
+ , log = require("./log.js")
+ , path = require("path")
+ , relativize = require("./relativize.js")
+ , npm = require("../npm.js")
+
// Like link(), but a silent no-op when `from` does not exist.
+function linkIfExists (from, to, gently, cb) {
+  fs.stat(from, function (er) {
+    if (er) return cb()
+    link(from, to, gently, cb)
+  })
+}
+
// Create a relative symlink at `to` pointing to `from`: verify the
// source exists, remove whatever is already at the target (passing
// `gently` through to rimraf; --force clears it), create the parent
// dir, then symlink.
+function link (from, to, gently, cb) {
+  if (typeof cb !== "function") cb = gently, gently = null
+  if (npm.config.get("force")) gently = false
+  chain
+    ( [ [fs, "stat", from]
+      , [rm, to, { gently: gently }]
+      , [mkdir, path.dirname(to)]
+      , [fs, "symlink", relativize(from, to), to] ]
+    , cb)
+}
diff --git a/deps/npm/lib/utils/load-package-defaults.js b/deps/npm/lib/utils/load-package-defaults.js
new file mode 100644
index 0000000000..180507a445
--- /dev/null
+++ b/deps/npm/lib/utils/load-package-defaults.js
@@ -0,0 +1,75 @@
+
+module.exports = loadPackageDefaults
+
+var path = require("path")
+ , log = require("./log.js")
+ , find = require("./find.js")
+ , asyncMap = require("slide").asyncMap
+ , npm = require("../npm.js")
+ , fs = require("graceful-fs")
+
// Fill in pkg.bin and pkg.man defaults from the package's
// directories.{bin,man} folders.  Runs at most once per package
// object (guarded by _defaultsLoaded).  cb(er, pkg).
+function loadPackageDefaults (pkg, pkgDir, cb) {
+  if (!pkg) return cb(new Error("no package to load defaults from!"))
+  if (typeof pkgDir === "function") {
+    cb = pkgDir
+    pkgDir = path.join(npm.dir, pkg.name, pkg.version, "package")
+  }
+  if (!pkgDir) pkgDir = "."
+
+  if (pkg._defaultsLoaded) return cb(null, pkg)
+
+  pkg._defaultsLoaded = true
  // slide's asyncMap runs each step function over the single package.
+  asyncMap
+    ( [pkg]
+    , function (pkg, cb) { log.verbose(pkg._id, "loadDefaults", cb) }
+    , readDefaultBins(pkgDir)
+    , readDefaultMans(pkgDir)
+    , function (er) { cb(er, pkg) } )
+}
+
// Invoke fn once per own-enumerable value of obj (values only; the
// keys are not passed along).
function objectForEach (obj, fn) {
  var keys = Object.keys(obj)
  for (var i = 0; i < keys.length; i++) {
    fn(obj[keys[i]])
  }
}
+
+function readDefaultMans (pkgDir) { return function (pkg, cb) {
+ var man = pkg.directories && pkg.directories.man
+ , manDir = path.join(pkgDir, man)
+ if (pkg.man && !Array.isArray(pkg.man)) pkg.man = [pkg.man]
+ if (pkg.man || !man) return cb(null, pkg)
+ find(manDir, /\.[0-9]+(\.gz)?$/, function (er, filenames) {
+ if (er) return cb(er)
+ var cut = pkgDir === "." ? 0 : pkgDir.length + 1
+ pkg.man = (filenames || []).map(function (filename) {
+ return filename.substr(cut)
+ }).filter(function (f) {
+ return !f.match(/(^|\/)\./)
+ })
+ cb(null,pkg)
+ })
+}}
+
// Step function: populate pkg.bin from the directories.bin folder when
// no explicit bin mapping exists.  Dotfiles are skipped and ".js" /
// ".node" extensions are dropped from the command names.
+function readDefaultBins (pkgDir) { return function (pkg, cb) {
+  var bin = pkg.directories && pkg.directories.bin
  // The legacy "bins" field is folded into "bin".
+  if (pkg.bins) pkg.bin = pkg.bins, delete pkg.bins
+  if (pkg.bin || !bin) return cb(null, pkg)
+  log.verbose("linking default bins", pkg._id)
+  var binDir = path.join(pkgDir, bin)
+  pkg.bin = {}
+  find(binDir, function (er, filenames) {
+    if (er || !filenames || !filenames.length) return cb(er, pkg)
    // cut strips the pkgDir prefix (for values); binCut strips the bin
    // dir prefix (for command names).
+    var cut = pkgDir === "." ? 0 : pkgDir.length + 1
+      , binCut = pkgDir === "." ? bin.length - 1 : binDir.length + 1
+    filenames.filter(function (f) {
+      return !f.substr(binCut).match(/(^|\/)\./)
+    }).forEach(function (filename) {
+      var key = filename.substr(binCut)
+                  .replace(/\.(js|node)$/, '')
+        , val = filename.substr(cut)
+      if (key.length && val.length) pkg.bin[key] = val
+    })
+    log.silly(pkg.bin, pkg._id+".bin")
+    cb(null, pkg)
+  })
+}}
diff --git a/deps/npm/lib/utils/log.js b/deps/npm/lib/utils/log.js
new file mode 100644
index 0000000000..dece9e3d19
--- /dev/null
+++ b/deps/npm/lib/utils/log.js
@@ -0,0 +1,185 @@
+
+/*
+log levels:
+0,1,2,3
+verbose,info,warn,error
+
+Default setting for logs is "info"
+Default setting to show is "info"
+
+Possible values of level/loglevel:
+silly,verbose,info,warn,error,win,silent
+
+silent quiets everything
+
+
+*/
+
+
+module.exports = log
+
+var output = require("./output.js")
+
// Wrap msg in ANSI escape codes for the given color spec.
// Falsey messages yield "" so no stray escape codes are emitted.
function colorize (msg, color) {
  if (!msg) return ""
  return "\u001b[" + color + "m" + msg + "\u001b[0m"
}
+
// Numeric log levels: silly=-1, verbose=0, info=1, WARN=2, "ERR!"=3.
// ERROR/ERR are string aliases resolved to "ERR!" in the loop below;
// win/paused/silent are sentinel values far above any printable level.
var l = -1
  , LEVEL = { silly : l++
            , verbose : l++
            , info : l++
            , WARN : l++
            , "ERR!" : l++
            , ERROR : "ERR!"
            , ERR : "ERR!"
            , win : 0x15AAC5
            , paused : 0x19790701
            , silent : 0xDECAFBAD
            }
  , COLOR = {}
  , SHOWLEVEL = null
  , normalNames = {}
log.LEVEL = LEVEL
// Map numeric levels back to the canonical names used as
// npm.emit("log."+name) event suffixes.
normalNames[LEVEL["ERR!"]] = "error"
normalNames[LEVEL.WARN] = "warn"
normalNames[LEVEL.info] = "info"
normalNames[LEVEL.verbose] = "verbose"
normalNames[LEVEL.silly] = "silly"
normalNames[LEVEL.win] = "win"

// Resolve string aliases, add lowercase keys and reverse
// (number -> name) lookups, and hang a per-level convenience method on
// `log` (e.g. log.warn(msg, pref, cb)) for everything but silent/paused.
Object.keys(LEVEL).forEach(function (l) {
  if (typeof LEVEL[l] === "string") LEVEL[l] = LEVEL[LEVEL[l]]
  else LEVEL[LEVEL[l]] = l
  LEVEL[l.toLowerCase()] = LEVEL[l]
  if (l === "silent" || l === "paused") return
  log[l] = log[l.toLowerCase()] =
    function (msg, pref, cb) { return log(msg, pref, l, cb) }
})

// ANSI color specs per level (keyed by both number and name via the
// for-in copy), plus the "npm" tag and prefix colors.
COLOR[LEVEL.silly] = 30
COLOR[LEVEL.verbose] = "34;40"
COLOR[LEVEL.info] = 32
COLOR[LEVEL.warn] = "30;41"
COLOR[LEVEL.error] = "31;40"
for (var c in COLOR) COLOR[LEVEL[c]] = COLOR[c]
COLOR.npm = "37;40"
COLOR.pref = 35
+
+var logBuffer = []
+ , ini = require("./ini.js")
+ , waitForConfig
+log.waitForConfig = function () { waitForConfig = true }
+
+// now the required stuff has been loaded,
+// so the transitive module dep will work
+var util = require("util")
+ , npm = require("../npm.js")
+ , net = require("net")
+
// Lazily resolve the numeric show-level from config.  The computed
// value is memoized in SHOWLEVEL once config has resolved (or when
// waitForConfig was never requested).
Object.defineProperty(log, "level",
  { get : function () {
      if (SHOWLEVEL !== null) return SHOWLEVEL
      var show = npm.config && npm.config.get("loglevel") || ''
      // Only the first comma-separated token is the show level.
      show = show.split(",")[0]
      if (!isNaN(show)) show = +show
      else if (!LEVEL.hasOwnProperty(show)) {
        util.error("Invalid loglevel config: "+JSON.stringify(show))
        show = "info"
      }
      if (isNaN(show)) show = LEVEL[show]
      else show = +show
      if (!waitForConfig || ini.resolved) SHOWLEVEL = show
      return show
    }
    // NOTE(review): the getter reads the "loglevel" config key but the
    // setter writes "showlevel" -- these look like they should be the
    // same key; confirm against lib/utils/config-defs.
  , set : function (l) {
      SHOWLEVEL = null
      npm.config.set("showlevel", l)
    }
  })
+
// Core logger: log(msg, pref, level, cb).  `level` may be a name or a
// number; events are buffered while config is pending (when
// waitForConfig was requested) and ultimately emitted on npm as "log"
// and "log.<name>" events for the writer below to render.
function log (msg, pref, level, cb) {
  if (typeof level === "function") cb = level, level = null
  var show = log.level
  if (show === LEVEL.silent || show === LEVEL.paused) return cb && cb()
  if (level == null) level = LEVEL.info
  if (isNaN(level)) level = LEVEL[level]
  else level = +level

  // logging just undefined is almost never the right thing.
  // a lot of these are kicking around throughout the codebase
  // with relatively unhelpful prefixes.
  if (msg === undefined && level > LEVEL.silly) {
    msg = new Error("undefined log message")
  }
  // Errors are always logged at the error level.
  if (typeof msg === "object" && (msg instanceof Error)) level = LEVEL.error
  // Buffer while config is unresolved or logging is paused.
  if (!ini.resolved && waitForConfig || level === LEVEL.paused) {
    return logBuffer.push([msg, pref, level, cb])
  }
  // First unbuffered event flushes the backlog (re-entrantly) in order.
  if (logBuffer.length && !logBuffer.discharging) {
    logBuffer.push([msg, pref, level, cb])
    logBuffer.discharging = true
    logBuffer.forEach(function (l) { log.apply(null, l) })
    logBuffer.length = 0
    delete logBuffer.discharging
    return
  }
  // NOTE(review): assigning log.level invokes the setter, which resets
  // SHOWLEVEL and writes the "showlevel" config -- confirm this side
  // effect is intentional.
  log.level = show
  npm.emit("log", { level : level, msg : msg, pref : pref, cb : cb })
  npm.emit("log."+normalNames[level], { msg : msg, pref : pref, cb : cb })
}
+
// In-memory history of all log events (bounded between 1000 and 2000
// entries), plus the writer that renders events to the log fd.
var loglog = log.history = []
  , loglogLen = 0
npm.on("log", function (logData) {
  var level = logData.level
    , msg = logData.msg
    , pref = logData.pref
    , cb = logData.cb || function () {}
    , show = log.level
    , spaces = " "
    , logFD = npm.config.get("logfd")
  if (msg instanceof Error) {
    msg = logData.msg = msg.stack || msg.toString()
  }
  loglog.push(logData)
  loglogLen ++
  if (loglogLen > 2000) {
    // Trim history down to the most recent 1000 entries.
    loglog = loglog.slice(loglogLen - 1000)
    loglogLen = 1000
  }
  // Drop events below the configured show level, and the non-finite
  // sentinel levels (win/paused/silent).
  if (!isFinite(level) || level < show) return cb()
  if (typeof msg !== "string" && !(msg instanceof Error)) {
    msg = util.inspect(msg, 0, 4, true)
  }

  // console.error("level, showlevel, show", level, show, (level >= show))
  if (pref && COLOR.pref) {
    pref = colorize(pref, COLOR.pref)
  }
  if (!pref) pref = ""

  // Optional "npm <levl>" banner before the caller's prefix.
  if (npm.config.get("logprefix")) {
    pref = colorize("npm", COLOR.npm)
      + (COLOR[level] ? " "+colorize(
          (LEVEL[level]+spaces).substr(0,4), COLOR[level]) : "")
      + (pref ? (" " + pref) : "")
  }
  if (pref) pref += " "



  // Repeat the prefix on continuation lines of multi-line messages.
  if (msg.indexOf("\n") !== -1) {
    msg = msg.split(/\n/).join("\n"+pref)
  }
  msg = pref+msg
  return output.write(msg, logFD, cb)
})
+
// Wrap a callback so that, when it receives an error, `msg` is logged
// at the error level before the callback fires.
log.er = function (cb, msg) {
  // A wrapper with nothing to print is a programming error.
  if (!msg) {
    throw new Error(
      "Why bother logging it if you're not going to print a message?")
  }
  return function (er) {
    if (er) log.error(msg)
    cb.apply(this, arguments)
  }
}
diff --git a/deps/npm/lib/utils/mkdir-p.js b/deps/npm/lib/utils/mkdir-p.js
new file mode 100644
index 0000000000..cc2b465fb6
--- /dev/null
+++ b/deps/npm/lib/utils/mkdir-p.js
@@ -0,0 +1,191 @@
+
+var log = require("./log.js")
+ , fs = require("graceful-fs")
+ , path = require("path")
+ , npm = require("../npm.js")
+ , exec = require("./exec.js")
+ , uidNumber = require("./uid-number.js")
+ , umask = process.umask()
+ , umaskOrig = umask
+ , addedUmaskExit = false
+ , mkdirCache = {}
+
+module.exports = mkdir
// mkdir -p with optional mode/uid/gid/noChmod.  Everything but
// `ensure` and the callback is optional; the cascade below shifts
// missing arguments into place.
function mkdir (ensure, mode, uid, gid, noChmod, cb_) {
  if (typeof cb_ !== "function") cb_ = noChmod, noChmod = null
  if (typeof cb_ !== "function") cb_ = gid, gid = null
  if (typeof cb_ !== "function") cb_ = uid, uid = null
  if (typeof cb_ !== "function") cb_ = mode, mode = npm.modes.exec

  // If the requested mode has bits the umask would strip, zero the
  // process umask for the rest of the run (restored on exit).
  if (mode & umask) {
    log.verbose(mode.toString(8), "umasking from "+umask.toString(8))
    process.umask(umask = 0)
    if (!addedUmaskExit) {
      addedUmaskExit = true
      process.on("exit", function () { process.umask(umask = umaskOrig) })
    }
  }

  ensure = path.resolve(ensure).replace(/\/+$/, '')

  // mkdir("/") should not do anything, since that always exists.
  if (!ensure
      || ( process.platform === "win32"
          && ensure.match(/^[a-zA-Z]:(\\|\/)?$/))) {
    return cb_()
  }

  // Coalesce concurrent mkdirs of the same path into one operation;
  // all queued callbacks fire when the first finishes.
  if (mkdirCache.hasOwnProperty(ensure)) {
    return mkdirCache[ensure].push(cb_)
  }
  mkdirCache[ensure] = [cb_]

  function cb (er) {
    var cbs = mkdirCache[ensure]
    delete mkdirCache[ensure]
    cbs.forEach(function (c) { c(er) })
  }

  if (uid === null && gid === null) {
    return mkdir_(ensure, mode, uid, gid, noChmod, cb)
  }

  // Resolve user/group names to numeric ids before creating anything.
  uidNumber(uid, gid, function (er, uid, gid) {
    if (er) return cb(er)
    mkdir_(ensure, mode, uid, gid, noChmod, cb)
  })
}
+
// If `ensure` already exists as a directory, verify (and if needed fix)
// its mode/owner; otherwise create the whole path segment by segment.
function mkdir_ (ensure, mode, uid, gid, noChmod, cb) {
  fs.stat(ensure, function (er, s) {
    if (!s || !s.isDirectory()) {
      // Missing (stat failed) or not a directory: walk and create.
      return walkDirs(ensure, mode, uid, gid, noChmod, cb)
    }
    var modeOk = noChmod || (s.mode & mode) === mode
      , uidOk = typeof uid !== "number" || s.uid === uid
      , gidOk = typeof gid !== "number" || s.gid === gid
    if (modeOk && uidOk && gidOk) return cb()
    done(ensure, mode, uid, gid, noChmod, cb)
  })
}
+
// Final step after the directory exists: apply mode (unless noChmod)
// and then ownership via done_.
function done (ensure, mode, uid, gid, noChmod, cb) {
  // now the directory has been created.
  // chown it to the desired uid/gid
  // Don't chown the npm.root dir, though, in case we're
  // in unsafe-perm mode.
  log.verbose("done: "+ensure+" "+mode.toString(8), "mkdir")

  // only chmod if noChmod isn't set.
  var d = done_(ensure, mode, uid, gid, cb)
  if (noChmod) return d()
  fs.chmod(ensure, mode, d)
}
+
// Returns the chmod callback: chowns `ensure` unless an error occurred,
// the target is npm's own dir, the ids are unknown, or unsafe-perm is on.
function done_ (ensure, mode, uid, gid, cb) {
  return function (er) {
    var skipChown = er
      || ensure === npm.dir
      || typeof uid !== "number"
      || typeof gid !== "number"
      || npm.config.get("unsafe-perm")
    if (skipChown) return cb(er)
    // ids may arrive as floats from config parsing; chown wants ints.
    fs.chown(ensure, Math.floor(uid), Math.floor(gid), cb)
  }
}
+
// Walk each segment of `ensure`, stat-ing and mkdir-ing as needed,
// inheriting uid/gid from the nearest existing parent when not given.
var pathSplit = process.platform === "win32" ? /\/|\\/ : "/"
function walkDirs (ensure, mode, uid, gid, noChmod, cb) {
  var dirs = ensure.split(pathSplit)
    , walker = []
    , foundUID = null
    , foundGID = null

  // gobble the "/" or C: first
  walker.push(dirs.shift())

  // The loop that goes through and stats each dir.
  ;(function S (d) {
    // no more directory steps left.
    if (d === undefined) {
      // do the chown stuff
      return done(ensure, mode, uid, gid, noChmod, cb)
    }

    // get the absolute dir for the next piece being stat'd
    walker.push(d)
    // NOTE(review): path.SPLIT_CHAR is not a documented path property on
    // modern node; if undefined, Array.join falls back to "," -- verify
    // on the node versions this targets.
    var dir = walker.join(path.SPLIT_CHAR)

    // stat callback lambda
    fs.stat(dir, function STATCB (er, s) {
      if (er) {
        // the stat failed - directory does not exist.

        log.verbose(er.message, "mkdir (expected) error")

        // use the same uid/gid as the nearest parent, if not set.
        if (foundUID !== null) uid = foundUID
        if (foundGID !== null) gid = foundGID

        // make the directory
        fs.mkdir(dir, mode, function MKDIRCB (er) {
          // since stat and mkdir are done as two separate syscalls,
          // operating on a path rather than a file descriptor, it's
          // possible that the directory didn't exist when we did
          // the stat, but then *did* exist when we go to to the mkdir.
          // If we didn't care about uid/gid, we could just mkdir
          // repeatedly, failing on any error other than "EEXIST".
          if (er && er.message.indexOf("EEXIST") === 0) {
            return fs.stat(dir, STATCB)
          }

          // any other kind of error is not saveable.
          if (er) return cb(er)

          // at this point, we've just created a new directory successfully.

          // if we care about permissions
          if (!npm.config.get("unsafe-perm") // care about permissions
              // specified a uid and gid
              && uid !== null
              && gid !== null ) {
            // set the proper ownership
            return fs.chown(dir, uid, gid, function (er) {
              if (er) return cb(er)
              // attack the next portion of the path.
              S(dirs.shift())
            })
          } else {
            // either we don't care about ownership, or it's already right.
            S(dirs.shift())
          }
        }) // mkdir

      } else {
        // the stat succeeded.
        if (s.isDirectory()) {
          // if it's a directory, that's good.
          // if the uid and gid aren't already set, then try to preserve
          // the ownership on up the tree. Things in ~ remain owned by
          // the user, things in / remain owned by root, etc.
          if (uid === null && typeof s.uid === "number") foundUID = s.uid
          if (gid === null && typeof s.gid === "number") foundGID = s.gid

          // move onto next portion of path
          S(dirs.shift())

        } else {
          // the stat succeeded, but it's not a directory
          log.verbose(dir, "mkdir exists")
          log.silly(s, "stat("+dir+")")
          log.verbose(s.isDirectory(), "isDirectory()")
          cb(new Error("Failed to mkdir "+dir+": File exists"))
        }// if (isDirectory) else
      } // if (stat failed) else
    }) // stat

    // start the S function with the first item in the list of directories.
  })(dirs.shift())
}
diff --git a/deps/npm/lib/utils/npm-registry-client/adduser.js b/deps/npm/lib/utils/npm-registry-client/adduser.js
new file mode 100644
index 0000000000..5e6794a758
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/adduser.js
@@ -0,0 +1,100 @@
+
+module.exports = adduser
+
+var uuid = require("node-uuid")
+ , request = require("./request.js")
+ , log = require("../log.js")
+ , npm = require("../../npm.js")
+ , crypto
+
+try {
+ crypto = process.binding("crypto") && require("crypto")
+} catch (ex) {}
+
+function sha (s) {
+ return crypto.createHash("sha1").update(s).digest("hex")
+}
+
// Create (or re-authenticate as) a registry user in the couchdb _users
// database.  cb(er, data, json, response); cb is wrapped by done() so
// non-201 responses become errors.
function adduser (username, password, email, cb) {
  if (!crypto) return cb(new Error(
    "You must compile node with ssl support to use the adduser feature"))

  password = ("" + (password || "")).trim()
  if (!password) return cb(new Error("No password supplied."))

  email = ("" + (email || "")).trim()
  if (!email) return cb(new Error("No email address supplied."))
  if (!email.match(/^[^@]+@[^\.]+\.[^\.]+/)) {
    return cb(new Error("Please use a real email address."))
  }

  // ":" is the basic-auth user/password separator, so couch forbids it.
  if (password.indexOf(":") !== -1) return cb(new Error(
    "Sorry, ':' chars are not allowed in passwords.\n"+
    "See <https://issues.apache.org/jira/browse/COUCHDB-969> for why."))
  var salt = uuid()
    , userobj =
      { name : username
      , salt : salt
      , password_sha : sha(password + salt)
      , email : email
      , _id : 'org.couchdb.user:'+username
      , type : "user"
      , roles : []
      , date: new Date().toISOString()
      }
  cb = done(cb)
  log.verbose(userobj, "before first PUT")
  request.PUT
    ( '/-/user/org.couchdb.user:'+encodeURIComponent(username)
    , userobj
    , function (error, data, json, response) {
        // if it worked, then we just created a new user, and all is well.
        // but if we're updating a current record, then it'll 409 first
        if (error && !npm.config.get("_auth")) {
          // must be trying to re-auth on a new machine.
          // use this info as auth
          npm.config.set("username", username)
          npm.config.set("_password", password)
          var b = new Buffer(username + ":" + password)
          npm.config.set("_auth", b.toString("base64"))
        }
        if (!error || !response || response.statusCode !== 409) {
          return cb(error, data, json, response)
        }
        log.verbose("update existing user", "adduser")
        return request.GET
          ( '/-/user/org.couchdb.user:'+encodeURIComponent(username)
          , function (er, data, json, response) {
              // BUGFIX: the original read data._rev without checking er,
              // throwing a TypeError whenever this GET failed.
              if (er) return cb(er, data, json, response)
              userobj._rev = data._rev
              userobj.roles = data.roles
              log.verbose(userobj, "userobj")
              request.PUT
                ( '/-/user/org.couchdb.user:'+encodeURIComponent(username)
                  + "/-rev/" + userobj._rev
                , userobj
                , cb )
            }
          )
      }
    )
}
+
// Wrap cb so that any response other than 201 Created is turned into an
// error, with a friendlier hint for auth failures.
function done (cb) { return function (error, data, json, response) {
  if (!error && (!response || response.statusCode === 201)) {
    return cb(error, data, json, response)
  }
  log.verbose([error, data, json], "back from adduser")
  if (!error) {
    // Non-201 without a transport error: synthesize one.
    error = new Error( (response && response.statusCode || "") + " "+
            "Could not create user\n"+JSON.stringify(data))
  }
  if (response
      && (response.statusCode === 401 || response.statusCode === 403)) {
    log.warn("Incorrect username or password\n"
            +"You can reset your account by visiting:\n"
            +"\n"
            +"    http://admin.npmjs.org/reset\n")
  }

  return cb(error)
}}
diff --git a/deps/npm/lib/utils/npm-registry-client/get.js b/deps/npm/lib/utils/npm-registry-client/get.js
new file mode 100644
index 0000000000..062244df3a
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/get.js
@@ -0,0 +1,171 @@
+
+module.exports = get
+
+var GET = require("./request.js").GET
+ , fs = require("graceful-fs")
+ , npm = require("../../npm.js")
+ , path = require("path")
+ , log = require("../log.js")
+ , mkdir = require("../mkdir-p.js")
+ , cacheStat = null
+
// Fetch registry data for project[/version], consulting the on-disk
// JSON cache first.  All arguments except cb are optional and are
// shifted into place by the cascade below.
function get (project, version, timeout, nofollow, staleOk, cb) {
  if (typeof cb !== "function") cb = staleOk, staleOk = false
  if (typeof cb !== "function") cb = nofollow, nofollow = false
  if (typeof cb !== "function") cb = timeout, timeout = -1
  if (typeof cb !== "function") cb = version, version = null
  if (typeof cb !== "function") cb = project, project = null
  if (typeof cb !== "function") {
    throw new Error("No callback provided to registry.get")
  }

  // During shell completion, prefer the cache aggressively (>= 1 min).
  if ( process.env.COMP_CWORD !== undefined
      && process.env.COMP_LINE !== undefined
      && process.env.COMP_POINT !== undefined
      ) timeout = Math.max(timeout, 60000)

  var uri = []
  uri.push(project || "")
  if (version) uri.push(version)
  uri = uri.join("/")

  // /-/all is special.
  // It uses timestamp-based caching and partial updates,
  // because it is a monster.
  if (uri === "/-/all") {
    return requestAll(cb)
  }

  // Load any cached copy (with its mtime) before deciding to hit the
  // network; parse failures are treated as a cache miss.
  var cache = path.join(npm.cache, uri, ".cache.json")
  fs.stat(cache, function (er, stat) {
    if (!er) fs.readFile(cache, function (er, data) {
      try { data = JSON.parse(data) }
      catch (ex) { data = null }
      get_(uri, timeout, cache, stat, data, nofollow, staleOk, cb)
    })
    else get_(uri, timeout, cache, null, null, nofollow, staleOk, cb)
  })
}
+
// Fetch the full registry index, merging updates into the on-disk cache.
function requestAll (cb) {
  var cache = path.join(npm.cache, "/-/all", ".cache.json")

  mkdir(path.join(npm.cache, "-", "all"), function (er) {
    fs.readFile(cache, function (er, data) {
      if (er) return requestAll_(0, {}, cb)
      try {
        data = JSON.parse(data)
      } catch (ex) {
        // Corrupt cache: reset it, then fetch from scratch.
        // BUGFIX: the original fell through after scheduling this
        // writeFile, calling requestAll_ a second time -- synchronously,
        // with the raw unparsed buffer as `data`.
        return fs.writeFile(cache, "{}", function (er) {
          if (er) return cb(new Error("Broken cache. "
                                     +"Please run 'npm cache clean' "
                                     +"and try again."))
          return requestAll_(0, {}, cb)
        })
      }
      // _updated is the timestamp of the last successful sync.
      var t = +data._updated || 0
      requestAll_(t, data, cb)
    })
  })
}
+
// Fetch index updates since timestamp c, merge into `data`, and persist.
function requestAll_ (c, data, cb) {
  // use the cache and update in the background if it's not too old
  // NOTE(review): c is the cache's _updated wall-clock timestamp, so
  // this treats anything younger than 60s as fresh -- confirm window.
  if (Date.now() - c < 60000) {
    cb(null, data)
    // Swallow the eventual network result; it only refreshes the cache.
    cb = function () {}
  }

  var uri = "/-/all/since?stale=update_after&startkey=" + c

  if (c === 0) {
    log.warn("Building the index for the first time, please be patient")
    uri = "/-/all"
  }

  var cache = path.join(npm.cache, "-/all", ".cache.json")
  GET(uri, function (er, updates, _, res) {
    if (er) return cb(er, data)
    // Stamp the merge with the server's date header for the next sync.
    var headers = res.headers
      , updated = Date.parse(headers.date)
    Object.keys(updates).forEach(function (p) {
      data[p] = updates[p]
    })
    data._updated = updated
    fs.writeFile( cache, JSON.stringify(data)
                , function (er) {
                    // _updated is cache metadata; don't expose it.
                    delete data._updated
                    return cb(er, data)
                  })
  })
}
+
// Decide between serving the cache and hitting the network, using the
// cache mtime (timeout), staleOk background refresh, and etags.
function get_ (uri, timeout, cache, stat, data, nofollow, staleOk, cb) {
  var etag
  if (data && data._etag) etag = data._etag
  if (timeout && timeout > 0 && stat && data) {
    // Cache younger than `timeout` seconds: serve it, skip the request.
    if ((Date.now() - stat.mtime.getTime())/1000 < timeout) {
      log.verbose("not expired, no request", "registry.get " +uri)
      delete data._etag
      return cb(null, data, JSON.stringify(data), {statusCode:304})
    }
    // Stale but acceptable: answer now from cache, refresh in background.
    if (staleOk) {
      log.verbose("staleOk, background update", "registry.get " +uri)
      delete data._etag
      process.nextTick(cb.bind( null, null, data, JSON.stringify(data)
                              , {statusCode: 304} ))
      cb = function () {}
    }
  }

  GET(uri, etag, nofollow, function (er, remoteData, raw, response) {
    if (response) {
      log.silly([response.statusCode, response.headers], "get cb")
      // 304 Not Modified: our cached copy is still current.
      if (response.statusCode === 304 && etag) {
        remoteData = data
        log.verbose(uri+" from cache", "etag")
      }
    }

    data = remoteData
    if (er) return cb(er, data, raw, response)

    // just give the write the old college try. if it fails, whatever.
    function saved () {
      delete data._etag
      cb(er, data, raw, response)
    }

    saveToCache(cache, data, saved)
  })
}
+
// Determine the uid/gid that should own cache files (memoized in
// cacheStat), preferring the cache dir's owner and falling back to
// $HOME's owner, then delegate the actual write to saveToCache_.
function saveToCache (cache, data, saved) {
  if (cacheStat) {
    return saveToCache_(cache, data, cacheStat.uid, cacheStat.gid, saved)
  }
  fs.stat(npm.cache, function (er, st) {
    if (er) {
      return fs.stat(process.env.HOME || "", function (er, st) {
        // if this fails, oh well.
        if (er) return saved()
        cacheStat = st
        return saveToCache(cache, data, saved)
      })
    }
    cacheStat = st || { uid: null, gid: null }
    return saveToCache(cache, data, saved)
  })
}
+
// Best-effort cache write: mkdir, write JSON, chown.  Every failure
// path just calls saved() -- caching is never allowed to break a get.
function saveToCache_ (cache, data, uid, gid, saved) {
  mkdir(path.dirname(cache), npm.modes.exec, uid, gid, function (er) {
    if (er) return saved()
    fs.writeFile(cache, JSON.stringify(data), function (er) {
      if (er) return saved()
      if (uid === null || gid === null) return saved()
      fs.chown(cache, uid, gid, saved)
    })
  })
}
diff --git a/deps/npm/lib/utils/npm-registry-client/index.js b/deps/npm/lib/utils/npm-registry-client/index.js
new file mode 100644
index 0000000000..2a42947450
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/index.js
@@ -0,0 +1,16 @@
+
+// utilities for working with the js-registry site.
+
var cached = {}

// Build a getter that loads ./<p>.js on first access and memoizes it.
function lazyGet (p) { return function () {
  if (!cached[p]) cached[p] = require("./"+p+".js")
  return cached[p]
}}

// Expose each registry sub-command as a lazy, enumerable property.
function setLazyGet (p) {
  Object.defineProperty(exports, p,
    { get : lazyGet(p)
    , enumerable : true })
}

; ["publish", "unpublish", "tag", "adduser", "get", "request", "star"]
  .forEach(setLazyGet)
diff --git a/deps/npm/lib/utils/npm-registry-client/publish.js b/deps/npm/lib/utils/npm-registry-client/publish.js
new file mode 100644
index 0000000000..86cfdc6a89
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/publish.js
@@ -0,0 +1,163 @@
+
+module.exports = publish
+
+var request = require("./request.js")
+ , GET = request.GET
+ , PUT = request.PUT
+ , DELETE = request.DELETE
+ , reg = request.reg
+ , upload = request.upload
+ , log = require("../log.js")
+ , path = require("path")
+ , npm = require("../../npm.js")
+ , url = require("url")
+
// Publish `data` (a package.json object) to the registry, optionally
// attaching a prebuilt binary dist and a readme.  cb(er); once the
// version document is PUT, later failures roll the publish back.
function publish (data, prebuilt, readme, cb) {
  if (typeof readme === "function") cb = readme, readme = null
  if (typeof prebuilt === "function") cb = prebuilt, prebuilt = null
  // add the dist-url to the data, pointing at the tarball.
  // if the {name} isn't there, then create it.
  // if the {version} is already there, then fail.
  // then:
  // PUT the data to {config.registry}/{data.name}/{data.version}
  var registry = reg()
  if (registry instanceof Error) return cb(registry)

  // Skeleton package document, used only when the name doesn't exist yet.
  var fullData =
    { _id : data.name
    , name : data.name
    , description : data.description
    , "dist-tags" : {}
    , versions : {}
    , readme: readme ? "" + readme : null
    , maintainers :
      [ { name : npm.config.get("username")
        , email : npm.config.get("email")
        }
      ]
    }

  var tbName = data.name + "-" + data.version + ".tgz"
    , bd = npm.config.get("bindist")
    , pbName = data.name + "-" + data.version + "-" + bd + ".tgz"
    , tbURI = data.name + "/-/" + tbName
    , pbURI = data.name + "/-/" + pbName

  data._id = data.name+"@"+data.version
  data.dist = data.dist || {}
  data.dist.tarball = url.resolve(registry, tbURI)
                         .replace(/^https:\/\//, "http://")

  if (prebuilt && bd) {
    // BUGFIX: data.dist.bin may not exist yet (data.dist was just
    // defaulted to {} above); indexing into undefined threw a TypeError
    // whenever a prebuilt bindist was published.
    data.dist.bin = data.dist.bin || {}
    data.dist.bin[bd] = data.dist.bin[bd] || {}
    data.dist.bin[bd].tarball = url.resolve(registry, pbURI)
                                   .replace(/^https:\/\//, "http://")
  }

  // first try to just PUT the whole fullData, and this will fail if it's
  // already there, because it'll be lacking a _rev, so couch'll bounce it.
  PUT(encodeURIComponent(data.name), fullData,
      function (er, parsed, json, response) {
    // get the rev and then upload the attachment
    // a 409 is expected here, if this is a new version of an existing package.
    if (er
        && !(response && response.statusCode === 409)
        && !( parsed
            && parsed.reason ===
              "must supply latest _rev to update existing package" )) {
      return log.er(cb, "Failed PUT response "
                   +(response && response.statusCode))(er)
    }
    var dataURI = encodeURIComponent(data.name)
                + "/" + encodeURIComponent(data.version)

    var tag = data.tag || npm.config.get("tag")
    if (npm.config.get("pre")) dataURI += "/-pre/true"
    else if (tag) dataURI += "/-tag/" + tag
    else dataURI += "/-tag/latest"

    // let's see what versions are already published.
    // could be that we just need to update the bin dist values.
    GET(data.name, function (er, fullData) {
      if (er) return cb(er)

      var exists = fullData.versions && fullData.versions[data.version]
      if (exists) {
        // Version already published: only attach missing bindists;
        // anything else is a publish conflict.
        log(exists._id, "Already published")
        var ebin = exists.dist.bin || {}
          , nbin = data.dist.bin || {}
          , needs = Object.keys(nbin).filter(function (bd) {
              return !ebin.hasOwnProperty(bd)
            })
        log.verbose(needs, "uploading bin dists")
        if (!needs.length) return cb(conflictError(data._id))
        // attach the needed bindists, upload the new metadata
        exists.dist.bin = ebin
        needs.forEach(function (bd) { exists.dist.bin[bd] = nbin[bd] })
        return PUT(dataURI + "/-rev/" + fullData._rev, exists, function (er) {
          if (er) return cb(er)
          attach(data.name, prebuilt, pbName, cb)
        })
      }

      PUT(dataURI, data, function (er) {
        if (er) {
          if (er.message.indexOf("conflict Document update conflict.") === 0) {
            return cb(conflictError(data._id))
          }
          return log.er(cb, "Error sending version data")(er)
        }

        var c = path.resolve(npm.cache, data.name, data.version)
          , tb = path.resolve(c, "package.tgz")

        // From here on, any failure unpublishes the version we just PUT.
        cb = rollbackFailure(data, cb)

        log.verbose([data.name, tb, tbName], "attach 2")
        attach(data.name, tb, tbName, function (er) {
          log.verbose([er, data.name, prebuilt, pbName], "attach 3")
          if (er || !prebuilt) return cb(er)
          attach(data.name, prebuilt, pbName, cb)
        })
      })
    })
  })
}
+
// Build the EPUBLISHCONFLICT error used when a version already exists.
function conflictError (pkgid) {
  var er = new Error("publish fail")
  er.errno = npm.EPUBLISHCONFLICT
  er.pkgid = pkgid
  return er
}
+
// Upload `file` as a couchdb attachment named `filename` on package
// document `doc`, fetching the current _rev first.
function attach (doc, file, filename, cb) {
  doc = encodeURIComponent(doc)
  GET(doc, function (er, d) {
    if (er) return cb(er)
    if (!d) return cb(new Error(
      "Attempting to upload to invalid doc "+doc))
    var rev = "-rev/"+d._rev
      , attURI = doc + "/-/" + encodeURIComponent(filename) + "/" + rev
    log.verbose([attURI, file], "uploading")
    upload(attURI, file, cb)
  })
}
+
// Wrap cb: on error, unpublish the partially-published version before
// propagating the original error.
function rollbackFailure (data, cb) { return function (er) {
  if (!er) return cb()
  // Flag so other code knows we're in rollback mode.
  npm.ROLLBACK = true
  log.error(er, "publish failed")
  log("rollback", "publish failed")
  npm.commands.unpublish([data.name+"@"+data.version], function (er_) {
    if (er_) {
      log.error(er_, "rollback failed")
      log.error( "Invalid data in registry! Please report this."
               , "rollback failed" )
    } else log("rolled back", "publish failed")
    // Always report the original publish error, not the rollback result.
    cb(er)
  })
}}
diff --git a/deps/npm/lib/utils/npm-registry-client/request.js b/deps/npm/lib/utils/npm-registry-client/request.js
new file mode 100644
index 0000000000..d19e3ac31f
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/request.js
@@ -0,0 +1,242 @@
+module.exports = regRequest
+
+regRequest.GET = GET
+regRequest.PUT = PUT
+regRequest.reg = reg
+regRequest.upload = upload
+
+var npm = require("../../npm.js")
+ , url = require("url")
+ , log = require("../log.js")
+ , fs = require("graceful-fs")
+ , rm = require("rimraf")
+ , asyncMap = require("slide").asyncMap
+ , warnedAuth = false
+ , newloctimeout = 0
+ , stream = require("stream")
+ , Stream = stream.Stream
+ , request = require("request")
+ , getAgent = require("../get-agent.js")
+
// Perform an HTTP request against the registry.  what/etag/nofollow are
// optional (shifted below).  cb(er, parsed, raw, response).
function regRequest (method, where, what, etag, nofollow, cb_) {
  if (typeof cb_ !== "function") cb_ = nofollow, nofollow = false
  if (typeof cb_ !== "function") cb_ = etag, etag = null
  if (typeof cb_ !== "function") cb_ = what, what = null

  log.verbose(where||"/", method)

  // Since there are multiple places where an error could occur,
  // don't let the cb be called more than once.
  var errState = null
  function cb (er) {
    if (errState) return
    if (er) errState = er
    cb_.apply(null, arguments)
  }

  if (where.match(/^\/?favicon.ico/)) {
    return cb(new Error("favicon.ico isn't a package, it's a picture."))
  }

  var registry = reg()
  if (registry instanceof Error) return cb(registry)

  // Auth is required for writes (request body or always-auth), for user
  // record updates, and for DELETE -- but not for creating a new user.
  var adduserChange = /^\/?-\/user\/org\.couchdb\.user:([^\/]+)\/-rev/
    , adduserNew = /^\/?-\/user\/org\.couchdb\.user:([^\/]+)/
    , authRequired = (what || npm.config.get("always-auth"))
      && !where.match(adduserNew)
      || where.match(adduserChange)
      || method === "DELETE"

  // resolve to a full url on the registry
  if (!where.match(/^https?:\/\//)) {
    log.verbose(where, "raw, before any munging")

    // Split off the query string so only path segments get encoded.
    var q = where.split("?")
    where = q.shift()
    q = q.join("?")

    if (where.charAt(0) !== "/") where = "/" + where
    // Encode each segment; couch user doc ids keep their
    // "org.couchdb.user:" prefix with only "/" escaped.
    where = "." + where.split("/").map(function (p) {
      p = p.trim()
      if (p.match(/^org.couchdb.user/)) {
        return p.replace(/\//g, encodeURIComponent("/"))
      }
      return encodeURIComponent(p)
    }).join("/")
    if (q) where += "?" + q
    log.verbose([registry, where], "url resolving")
    where = url.resolve(registry, where)
    log.verbose(where, "url resolved")
  }

  var remote = url.parse(where)
    , auth = authRequired && npm.config.get("_auth")

  if (authRequired && !auth) {
    return cb(new Error(
      "Cannot insert data into the registry without authorization\n"
      + "See: npm-adduser(1)"))
  }

  // _auth is base64 "user:pass"; the url object wants it decoded.
  if (auth) remote.auth = new Buffer(auth, "base64").toString("utf8")

  makeRequest(method, remote, where, what, etag, nofollow, cb)
}
+
// Build and issue the actual HTTP request via mikeal/request.
// cb(er, parsed, raw, response) through requestDone.
function makeRequest (method, remote, where, what, etag, nofollow, cb) {
  // BUGFIX: the original performed this check *after* issuing the
  // request and `return`ed the Error instead of passing it to the
  // callback, so it could never take effect.  Check up front and
  // report through cb.
  if (!npm.config.get("registry")) {
    return cb(new Error(
      "Must define registry URL before accessing registry."))
  }

  var opts = { url: remote
             , method: method
             , agent: getAgent(remote)
             , strictSSL: npm.config.get("strict-ssl") }
    , headers = opts.headers = {}
  if (etag) {
    // Conditional request: revalidate GETs, guard writes.
    log.verbose(etag, "etag")
    headers[method === "GET" ? "if-none-match" : "if-match"] = etag
  }

  headers.accept = "application/json"

  opts.proxy = npm.config.get( remote.protocol === "https:"
                             ? "https-proxy" : "proxy" )

  // figure out wth 'what' is
  if (what) {
    if (Buffer.isBuffer(what) || typeof what === "string") {
      // Pre-serialized JSON body.
      opts.body = what
      headers["content-type"] = "application/json"
      headers["content-length"] = Buffer.byteLength(what)
    } else if (what instanceof Stream) {
      // Streamed upload (tarball attachments); length only if known.
      headers["content-type"] = "application/octet-stream"
      if (what.size) headers["content-length"] = what.size
    } else {
      // Plain object: let request JSON-encode it.  _etag is local
      // cache metadata, never sent to the server.
      delete what._etag
      opts.json = what
    }
  }

  if (nofollow) {
    opts.followRedirect = false
  }

  var req = request(opts, requestDone(method, where, cb))

  req.on("error", cb)

  if (what && (what instanceof Stream)) {
    what.pipe(req)
  }
}
+
+// cb(er, parsed, raw, response)
// Shared response handler: parse JSON bodies, turn registry error
// documents into Error objects, and invalidate local caches after
// mutating requests.  cb(er, parsed, raw, response).
function requestDone (method, where, cb) { return function (er, response, data) {
  if (er) return cb(er)

  var parsed

  if (Buffer.isBuffer(data)) {
    data = data.toString()
  }

  if (data && typeof data === "string" && response.statusCode !== 304) {
    try {
      parsed = JSON.parse(data)
    } catch (ex) {
      ex.message += "\n" + data
      log.verbose(data, "bad json")
      log.error("error parsing json", "registry")
      return cb(ex, null, data, response)
    }
  } else if (data) {
    // request already parsed it (json mode); keep a raw copy too.
    parsed = data
    data = JSON.stringify(parsed)
  }

  // expect data with any error codes
  if (!data && response.statusCode >= 400) {
    return cb( response.statusCode + " "
             + require("http").STATUS_CODES[response.statusCode]
             , null, data, response )
  }

  // `er` is known falsey here; reuse it as the outgoing error.
  // (The original redeclared it with `var er = null`.)
  er = null
  if (parsed && response.headers.etag) {
    parsed._etag = response.headers.etag
  }

  if (parsed && parsed.error && response.statusCode >= 400) {
    // BUGFIX: `name` was assigned without declaration below, leaking an
    // implicit global.
    var w = url.parse(where).pathname.substr(1)
      , name
    if (!w.match(/^-/) && parsed.error === "not_found") {
      w = w.split("/")
      name = w[w.indexOf("_rewrite") + 1]
      er = new Error("404 Not Found: "+name)
      er.errno = npm.E404
      er.pkgid = name
    } else {
      er = new Error(
        parsed.error + " " + (parsed.reason || "") + ": " + w)
    }
  } else if (method !== "HEAD" && method !== "GET") {
    // invalidate cache
    // This is irrelevant for commands that do etag caching, but
    // ls and view also have a timed cache, so this keeps the user
    // from thinking that it didn't work when it did.
    // Note that failure is an acceptable option here, since the
    // only result will be a stale cache for some helper commands.
    var path = require("path")
      , p = url.parse(where).pathname.split("/")
      , _ = "/"
      , caches = p.map(function (part) {
          return _ = path.join(_, part)
        }).map(function (cache) {
          return path.join(npm.cache, cache, ".cache.json")
        })

    // if the method is DELETE, then also remove the thing itself.
    // Note that the search index is probably invalid. Whatever.
    // That's what you get for deleting stuff. Don't do that.
    if (method === "DELETE") {
      p = p.slice(0, p.indexOf("-rev"))
      caches.push(path.join(npm.cache, p.join("/")))
    }

    asyncMap(caches, rm, function () {})
  }
  return cb(er, parsed, data, response)
}}
+
// Convenience wrappers around regRequest for the common verbs.
function GET (where, etag, nofollow, cb) {
  regRequest("GET", where, null, etag, nofollow, cb)
}

function PUT (where, what, etag, nofollow, cb) {
  regRequest("PUT", where, what, etag, nofollow, cb)
}
+
// PUT a file from disk to the registry as a stream.
function upload (where, filename, etag, nofollow, cb) {
  if (typeof nofollow === "function") cb = nofollow, nofollow = false
  if (typeof etag === "function") cb = etag, etag = null

  fs.stat(filename, function (er, stat) {
    if (er) return cb(er)
    var s = fs.createReadStream(filename)
    // size lets makeRequest set content-length for the stream body.
    s.size = stat.size
    s.on("error", cb)

    PUT(where, s, etag, nofollow, cb)
  })
}
+
// Return the configured registry URL normalized to end with "/", or an
// Error when none is configured.  Writes the normalized value back.
function reg () {
  var registry = npm.config.get("registry")
  if (!registry) {
    return new Error("Must define registry URL before accessing registry.")
  }
  if (registry.charAt(registry.length - 1) !== "/") registry += "/"
  npm.config.set("registry", registry)
  return registry
}
diff --git a/deps/npm/lib/utils/npm-registry-client/star.js b/deps/npm/lib/utils/npm-registry-client/star.js
new file mode 100644
index 0000000000..474a1786d4
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/star.js
@@ -0,0 +1,32 @@
+
+module.exports = star
+
+var request = require("./request.js")
+ , GET = request.GET
+ , PUT = request.PUT
+ , log = require("../log.js")
+ , npm = require("../../npm.js")
+
+function star (package, starred, cb) {
+ var users = {}
+
+ GET(package, function (er, fullData) {
+ if (er) return cb(er)
+
+ fullData = { _id: fullData._id
+ , _rev: fullData._rev
+ , users: fullData.users || {} }
+
+ if (starred) {
+ log.info("starring", fullData._id)
+ fullData.users[npm.config.get("username")] = true
+ log.verbose(fullData)
+ } else {
+ delete fullData.users[npm.config.get("username")]
+ log.info("unstarring", fullData._id)
+ log.verbose(fullData)
+ }
+
+ return PUT(package, fullData, cb)
+ })
+}
diff --git a/deps/npm/lib/utils/npm-registry-client/tag.js b/deps/npm/lib/utils/npm-registry-client/tag.js
new file mode 100644
index 0000000000..4d88a98dba
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/tag.js
@@ -0,0 +1,8 @@
+
+module.exports = tag
+
+var PUT = require("./request.js").PUT
+
+function tag (project, version, tag, cb) {
+ PUT(project+"/"+tag, JSON.stringify(version), cb)
+}
diff --git a/deps/npm/lib/utils/npm-registry-client/unpublish.js b/deps/npm/lib/utils/npm-registry-client/unpublish.js
new file mode 100644
index 0000000000..0c7c449912
--- /dev/null
+++ b/deps/npm/lib/utils/npm-registry-client/unpublish.js
@@ -0,0 +1,98 @@
+
+// fetch the data
+// modify to remove the version in question
+// If no versions remaining, then DELETE
+// else, PUT the modified data
+// delete the tarball
+
+module.exports = unpublish
+
+var request = require("./request.js")
+ , log = require("../log.js")
+ , get = require("./get.js")
+ , semver = require("semver")
+ , url = require("url")
+ , chain = require("slide").chain
+
+function unpublish (name, ver, cb) {
+ if (!cb) cb = ver, ver = null
+ if (!cb) throw new Error(
+ "Not enough arguments for registry unpublish")
+
+ get(name, null, -1, true, function (er, data) {
+ if (er) return log(name+" not published", "unpublish", cb)
+ // remove all if no version specified
+ if (!ver) {
+ log("No version specified, removing all", "unpublish")
+ return request("DELETE", name+'/-rev/'+data._rev, cb)
+ }
+
+ var versions = data.versions || {}
+ , versionPublic = versions.hasOwnProperty(ver)
+
+ if (!versionPublic) log(name+"@"+ver+" not published", "unpublish")
+ else {
+ var dist = versions[ver].dist
+ log.verbose(dist, "removing attachments")
+ }
+
+ delete versions[ver]
+ // if it was the only version, then delete the whole package.
+ if (!Object.keys(versions).length) {
+ log("No versions remain, removing entire package", "unpublish")
+ return request("DELETE", name+"/-rev/"+data._rev, cb)
+ }
+
+ if (!versionPublic) return cb()
+
+ var latestVer = data["dist-tags"].latest
+ for (var tag in data["dist-tags"]) {
+ if (data["dist-tags"][tag] === ver) delete data["dist-tags"][tag]
+ }
+
+ if (latestVer === ver) {
+ data["dist-tags"].latest =
+ Object.getOwnPropertyNames(versions).sort(semver.compare).pop()
+ }
+
+ var rev = data._rev
+ delete data._revisions
+ delete data._attachments
+ // log(data._rev, "rev")
+ request.PUT(name+"/-rev/"+rev, data,
+ log.er(detacher(data, dist, cb), "Failed to update the data"))
+ })
+}
+
+function detacher (data, dist, cb) { return function (er) {
+ if (er) return cb(er)
+ get(data.name, function (er, data) {
+ if (er) return cb(er)
+
+ var tb = url.parse(dist.tarball)
+
+ detach(data, tb.pathname, data._rev, function (er) {
+ if (er || !dist.bin) return cb(er)
+ chain(Object.keys(dist.bin).map(function (bt) {
+ return function (cb) {
+ var d = dist.bin[bt]
+ detach(data, url.parse(d.tarball).pathname, null, cb)
+ }
+ }), cb)
+ })
+ })
+}}
+
+function detach (data, path, rev, cb) {
+ if (rev) {
+ path += "/-rev/" + rev
+ log(path, "detach")
+ return request("DELETE", path, cb)
+ }
+ get(data.name, function (er, data) {
+ rev = data._rev
+ if (!rev) return cb(new Error(
+ "No _rev found in "+data._id))
+ detach(data, path, rev, cb)
+ })
+}
diff --git a/deps/npm/lib/utils/output.js b/deps/npm/lib/utils/output.js
new file mode 100644
index 0000000000..00da9f69f7
--- /dev/null
+++ b/deps/npm/lib/utils/output.js
@@ -0,0 +1,156 @@
+
+// centralized stdout writer.
+
+exports.doColor = doColor
+exports.write = write
+
+var npm = require("../npm.js")
+ , tty = require("tty")
+ , streams = {}
+ , ttys = {}
+ , net = require("net")
+ , util = require("util")
+ , deadStreams = {}
+
+function doColor (stream) {
+ var conf = npm.config.get("color")
+ return (!conf) ? false
+ : (conf === "always") ? true
+ : isatty(stream)
+}
+function isatty (stream) {
+ // console.error("isatty?", stream)
+ if (!tty.isatty) return true
+ if (!stream) return false
+ if (stream.isTTY) return true
+ if (stream && (typeof stream.fd === "number")) {
+ stream.isTTY = tty.isatty(stream.fd)
+ }
+ return stream.isTTY
+}
+
+function write (args, stream, lf, cb) {
+ // console.error("write", [args, stream, lf, cb])
+ if (typeof cb !== "function" && typeof lf === "function") {
+ cb = lf
+ lf = null
+ }
+ if (typeof cb !== "function" && typeof stream === "function") {
+ cb = stream
+ stream = npm.config.get("outfd")
+ }
+
+ stream = getStream(stream)
+ // console.error("gotStream", stream)
+ if (lf == null) lf = isatty(stream)
+ if (!stream) return cb && cb(), false
+ if (!Array.isArray(args)) args = [args]
+
+ // console.error("write", args)
+
+ var msg = ""
+ , colored = doColor(stream)
+ msg = args.map(function (arg) {
+ if (typeof arg !== "string") {
+ return util.inspect(arg, false, 5, colored) + "\n"
+ }
+ if (!colored) arg = arg.replace(/\033\[[0-9;]*m/g, '')
+ if (!npm.config.get("unicode")) {
+      arg = arg.replace(/└/g, "`")
+ .replace(/─/g, "-")
+ .replace(/├/g, "+")
+ .replace(/┬/g, "-")
+ }
+ return arg
+ }).join(" ")
+
+ // listen to the "output" event to cancel/modify/redirect
+ npm.output = {stream:stream, message:msg}
+ npm.emit("output", npm.output)
+ if (!npm.output) return cb && cb(), false // cancelled
+ stream = npm.output.stream
+ msg = npm.output.message
+
+ // EPIPE errors just mean that the stream is not listening
+ // any more. Mark the stream as dead, and return.
+ if (deadStreams[stream.fd]) {
+ return cb && cb(), false
+ }
+ if (!deadStreams.hasOwnProperty(stream.fd)) {
+ deadStreams[stream.fd] = false
+ stream.on("error", function (er) {
+ if (er.code === "EPIPE") {
+ deadStreams[stream.fd] = true
+ return cb && cb()
+ }
+ if (stream.listeners("error").length === 1) {
+ throw er
+ }
+ })
+ }
+
+ // use the \r\n in case we're in raw mode.
+ msg = msg.split(/\r?\n/).concat("").join(lf ? "\r\n" : "\n")
+ // output to stderr should be synchronous
+ if (stream === process.stderr || stream.fd === 2) {
+ process.stderr.write(msg)
+ if (cb) cb()
+ return true
+ }
+ // console.error("writing ", msg)
+ var flushed = stream.write(msg)
+ if (flushed && cb) {
+ process.nextTick(cb)
+ } else if (cb) {
+ stream.once("drain", cb)
+ }
+ return flushed
+}
+
+var hadError = false
+function getStream (fd) {
+ if (hadError) return
+
+ var stream
+ if (!fd && fd !== 0) return
+ if (typeof fd === "string") fd = +fd
+
+ // console.error("getStream", fd, hadError)
+
+ if (fd && typeof fd === "object") {
+ stream = fd
+ fd = fd.fd
+ } else if (streams[fd]) {
+ stream = streams[fd]
+ } else {
+ switch (fd) {
+ case 1:
+ stream = process.stdout
+ stream.fd = fd
+ stream.writable = true
+ break
+ case 2:
+ stream = process.stderr
+ stream.fd = fd
+ stream.writable = true
+ break
+ default:
+ try {
+ stream = new net.Stream(fd)
+ if (!stream || !stream.writable) {
+ throw new Error("Stream not writable")
+ }
+ } catch (ex) {
+ // if this fails, then regular logging is most likely broken.
+ var er = new Error("cannot output to fd "+fd + ": "+
+ (ex.stack || ex.message).substr(7) + "\n")
+ console.error(er.stack)
+ hadError = true
+ process.exit(1)
+ }
+ }
+ }
+
+ if (!stream || !stream.writable) return
+ return streams[fd] = stream
+}
diff --git a/deps/npm/lib/utils/promise-chain.js b/deps/npm/lib/utils/promise-chain.js
new file mode 100644
index 0000000000..c206c91ecb
--- /dev/null
+++ b/deps/npm/lib/utils/promise-chain.js
@@ -0,0 +1,39 @@
+
+module.exports = promiseChain
+
+// usage:
+//
+// promiseChain(cb) <-- this is the callback for eventual success or error
+// ( fn, [arg, arg, arg], function (a,b,c) { success(a,b,c) })
+// ( fn2, [args] )
+// () <-- this kicks it off.
+//
+// promiseChain.call(someObj, cb) <-- bind this-context for all functions
+
+function promiseChain (cb) {
+ var steps = []
+ , vals = []
+ , context = this
+ function go () {
+ var step = steps.shift()
+ if (!step) return cb()
+ try { step[0].apply(context, step[1]) }
+ catch (ex) { cb(ex) }
+ }
+ return function pc (fn, args, success) {
+ if (arguments.length === 0) return go()
+ // add the step
+ steps.push
+ ( [ fn
+ , (args || []).concat([ function (er) {
+ if (er) return cb(er)
+ var a = Array.prototype.slice.call(arguments, 1)
+ try { success && success.apply(context, a) }
+ catch (ex) { return cb(ex) }
+ go()
+ }])
+ ]
+ )
+ return pc
+ }
+}
diff --git a/deps/npm/lib/utils/prompt.js b/deps/npm/lib/utils/prompt.js
new file mode 100644
index 0000000000..d23e4ac587
--- /dev/null
+++ b/deps/npm/lib/utils/prompt.js
@@ -0,0 +1,69 @@
+
+module.exports = prompt
+
+var log = require("./log.js")
+ , buffer = ""
+ , tty = require("tty")
+
+function prompt (p, def, silent, cb) {
+ if (!cb) cb = silent, silent = false
+ if (!cb) cb = def, def = null
+ if (def) p += "("+(silent ? "<hidden>" : def)+") "
+ var r = (silent ? silentRead : read).bind(null, def, cb)
+ if (!process.stdout.write(p)) process.stdout.on("drain", function D () {
+ process.stdout.removeListener("drain", D)
+ r()
+ })
+ else r()
+}
+
+function read (def, cb) {
+ var stdin = process.openStdin()
+ , val = ""
+ stdin.resume()
+ stdin.setEncoding("utf8")
+ stdin.on("error", cb)
+ stdin.on("data", function D (chunk) {
+ val += buffer + chunk
+ buffer = ""
+ val = val.replace(/\r/g, '')
+ if (val.indexOf("\n") !== -1) {
+ if (val !== "\n") val = val.replace(/^\n+/, "")
+ buffer = val.substr(val.indexOf("\n"))
+ val = val.substr(0, val.indexOf("\n"))
+ stdin.pause()
+ stdin.removeListener("data", D)
+ stdin.removeListener("error", cb)
+ val = val.trim() || def
+ cb(null, val)
+ }
+ })
+}
+
+function silentRead (def, cb) {
+ var stdin = process.openStdin()
+ , val = ""
+ tty.setRawMode(true)
+ stdin.resume()
+ stdin.on("error", cb)
+ stdin.on("data", function D (c) {
+ c = "" + c
+ switch (c) {
+ case "\n": case "\r": case "\r\n": case "\u0004":
+ tty.setRawMode(false)
+ stdin.removeListener("data", D)
+ stdin.removeListener("error", cb)
+ val = val.trim() || def
+ process.stdout.write("\n")
+ stdin.pause()
+ return cb(null, val)
+ case "\u0003": case "\0":
+ return cb("cancelled")
+ break
+ default:
+ val += buffer + c
+ buffer = ""
+ break
+ }
+ })
+}
diff --git a/deps/npm/lib/utils/read-installed.js b/deps/npm/lib/utils/read-installed.js
new file mode 100644
index 0000000000..59fc6c490b
--- /dev/null
+++ b/deps/npm/lib/utils/read-installed.js
@@ -0,0 +1,295 @@
+
+// Walk through the file-system "database" of installed
+// packages, and create a data object related to the
+// installed versions of each package.
+
+/*
+This will traverse through all node_modules folders,
+resolving the dependencies object to the object corresponding to
+the package that meets that dep, or just the version/range if
+unmet.
+
+Assuming that you had this folder structure:
+
+/path/to
++-- package.json { name = "root" }
+`-- node_modules
+ +-- foo {bar, baz, asdf}
+ | +-- node_modules
+ | +-- bar { baz }
+ | `-- baz
+ `-- asdf
+
+where "foo" depends on bar, baz, and asdf, bar depends on baz,
+and bar and baz are bundled with foo, whereas "asdf" is at
+the higher level (sibling to foo), you'd get this object structure:
+
+{ <package.json data>
+, path: "/path/to"
+, parent: null
+, dependencies:
+ { foo :
+ { version: "1.2.3"
+ , path: "/path/to/node_modules/foo"
+ , parent: <Circular: root>
+ , dependencies:
+ { bar:
+ { parent: <Circular: foo>
+ , path: "/path/to/node_modules/foo/node_modules/bar"
+ , version: "2.3.4"
+ , dependencies: { baz: <Circular: foo.dependencies.baz> }
+ }
+ , baz: { ... }
+ , asdf: <Circular: asdf>
+ }
+ }
+ , asdf: { ... }
+ }
+}
+
+Unmet deps are left as strings.
+Extraneous deps are marked with extraneous:true
+deps that don't meet a requirement are marked with invalid:true
+
+to READ(packagefolder, parentobj, name, reqver)
+obj = read package.json
+installed = ./node_modules/*
+if parentobj is null, and no package.json
+ obj = {dependencies:{<installed>:"*"}}
+deps = Object.keys(obj.dependencies)
+obj.path = packagefolder
+obj.parent = parentobj
+if name, && obj.name !== name, obj.invalid = true
+if reqver, && obj.version !satisfies reqver, obj.invalid = true
+if !reqver && parentobj, obj.extraneous = true
+for each folder in installed
+ obj.dependencies[folder] = READ(packagefolder+node_modules+folder,
+ obj, folder, obj.dependencies[folder])
+# walk tree to find unmet deps
+for each dep in obj.dependencies not in installed
+ r = obj.parent
+ while r
+ if r.dependencies[dep]
+ if r.dependencies[dep].verion !satisfies obj.dependencies[dep]
+ WARN
+ r.dependencies[dep].invalid = true
+ obj.dependencies[dep] = r.dependencies[dep]
+ r = null
+ else r = r.parent
+return obj
+
+
+TODO:
+1. Find unmet deps in parent directories, searching as node does up
+as far as the left-most node_modules folder.
+2. Ignore anything in node_modules that isn't a package folder.
+
+*/
+
+
+var npm = require("../npm.js")
+ , fs = require("graceful-fs")
+ , path = require("path")
+ , asyncMap = require("slide").asyncMap
+ , semver = require("semver")
+ , readJson = require("./read-json.js")
+ , log = require("./log.js")
+
+module.exports = readInstalled
+
+function readInstalled (folder, cb) {
+ var d = npm.config.get("depth")
+ readInstalled_(folder, null, null, null, 0, d, function (er, obj) {
+ if (er) return cb(er)
+ // now obj has all the installed things, where they're installed
+ // figure out the inheritance links, now that the object is built.
+ resolveInheritance(obj)
+ cb(null, obj)
+ })
+}
+
+var rpSeen = {}
+function readInstalled_ (folder, parent, name, reqver, depth, maxDepth, cb) {
+ //console.error(folder, name)
+
+ var installed
+ , obj
+ , real
+ , link
+
+ fs.readdir(path.resolve(folder, "node_modules"), function (er, i) {
+ // error indicates that nothing is installed here
+ if (er) i = []
+ installed = i.filter(function (f) { return f.charAt(0) !== "." })
+ next()
+ })
+
+ readJson(path.resolve(folder, "package.json"), function (er, data) {
+ obj = data
+ if (!parent) {
+ obj = obj || true
+ er = null
+ }
+ return next(er)
+ })
+
+ fs.lstat(folder, function (er, st) {
+ if (er) {
+ if (!parent) real = true
+ return next(er)
+ }
+ fs.realpath(folder, function (er, rp) {
+ //console.error("realpath(%j) = %j", folder, rp)
+ real = rp
+ if (st.isSymbolicLink()) link = rp
+ next(er)
+ })
+ })
+
+ var errState = null
+ , called = false
+ function next (er) {
+ if (errState) return
+ if (er) {
+ errState = er
+ return cb(null, [])
+ }
+ //console.error('next', installed, obj && typeof obj, name, real)
+ if (!installed || !obj || !real || called) return
+ called = true
+ if (rpSeen[real]) return cb(null, rpSeen[real])
+ if (obj === true) {
+ obj = {dependencies:{}, path:folder}
+ installed.forEach(function (i) { obj.dependencies[i] = "*" })
+ }
+ if (name && obj.name !== name) obj.invalid = true
+ obj.realName = name || obj.name
+ obj.dependencies = obj.dependencies || {}
+
+ // "foo":"http://blah" is always presumed valid
+ if (reqver
+ && semver.validRange(reqver)
+ && !semver.satisfies(obj.version, reqver)) {
+ obj.invalid = true
+ }
+
+ if (parent
+ && !(name in parent.dependencies)
+ && !(name in (parent.devDependencies || {}))) {
+ obj.extraneous = true
+ }
+ obj.path = obj.path || folder
+ obj.realPath = real
+ obj.link = link
+ if (parent && !obj.link) obj.parent = parent
+ rpSeen[real] = obj
+ obj.depth = depth
+ if (depth >= maxDepth) return cb(null, obj)
+ asyncMap(installed, function (pkg, cb) {
+ var rv = obj.dependencies[pkg]
+ if (!rv && obj.devDependencies) rv = obj.devDependencies[pkg]
+ readInstalled_( path.resolve(folder, "node_modules/"+pkg)
+ , obj, pkg, obj.dependencies[pkg], depth + 1, maxDepth
+ , cb )
+ }, function (er, installedData) {
+ if (er) return cb(er)
+ installedData.forEach(function (dep) {
+ obj.dependencies[dep.realName] = dep
+ })
+ return cb(null, obj)
+ })
+ }
+}
+
+// starting from a root object, call findUnmet on each layer of children
+var riSeen = []
+function resolveInheritance (obj) {
+ if (typeof obj !== "object") return
+ if (riSeen.indexOf(obj) !== -1) return
+ riSeen.push(obj)
+ if (typeof obj.dependencies !== "object") {
+ obj.dependencies = {}
+ }
+ Object.keys(obj.dependencies).forEach(function (dep) {
+ findUnmet(obj.dependencies[dep])
+ })
+ Object.keys(obj.dependencies).forEach(function (dep) {
+ resolveInheritance(obj.dependencies[dep])
+ })
+}
+
+// find unmet deps by walking up the tree object.
+// No I/O
+var fuSeen = []
+function findUnmet (obj) {
+ if (fuSeen.indexOf(obj) !== -1) return
+ fuSeen.push(obj)
+ //console.error("find unmet", obj.name, obj.parent && obj.parent.name)
+ var deps = obj.dependencies = obj.dependencies || {}
+ //console.error(deps)
+ Object.keys(deps)
+ .filter(function (d) { return typeof deps[d] === "string" })
+ .forEach(function (d) {
+ //console.error("find unmet", obj.name, d, deps[d])
+ var r = obj.parent
+ , found = null
+ while (r && !found && typeof deps[d] === "string") {
+ // if r is a valid choice, then use that.
+ found = r.dependencies[d]
+ if (!found && r.realName === d) found = r
+
+ if (!found) {
+ r = r.link ? null : r.parent
+ continue
+ }
+ if ( typeof deps[d] === "string"
+ && !semver.satisfies(found.version, deps[d])) {
+ // the bad thing will happen
+ log.warn(obj.path + " requires "+d+"@'"+deps[d]
+ +"' but will load\n"
+ +found.path+",\nwhich is version "+found.version
+ ,"unmet dependency")
+ found.invalid = true
+ }
+ deps[d] = found
+ }
+ })
+ log.verbose([obj._id], "returning")
+ return obj
+}
+
+if (module === require.main) {
+ var util = require("util")
+ console.error("testing")
+
+ var called = 0
+ readInstalled(process.cwd(), function (er, map) {
+ console.error(called ++)
+ if (er) return console.error(er.stack || er.message)
+ cleanup(map)
+ console.error(util.inspect(map, true, 10, true))
+ })
+
+ var seen = []
+ function cleanup (map) {
+ if (seen.indexOf(map) !== -1) return
+ seen.push(map)
+ for (var i in map) switch (i) {
+ case "_id":
+ case "path":
+ case "extraneous": case "invalid":
+ case "dependencies": case "name":
+ continue
+ default: delete map[i]
+ }
+ var dep = map.dependencies
+// delete map.dependencies
+ if (dep) {
+// map.dependencies = dep
+ for (var i in dep) if (typeof dep[i] === "object") {
+ cleanup(dep[i])
+ }
+ }
+ return map
+ }
+}
diff --git a/deps/npm/lib/utils/read-json.js b/deps/npm/lib/utils/read-json.js
new file mode 100644
index 0000000000..e71128c1b1
--- /dev/null
+++ b/deps/npm/lib/utils/read-json.js
@@ -0,0 +1,496 @@
+
+module.exports = readJson
+readJson.processJson = processJson
+readJson.unParsePeople = unParsePeople
+readJson.parsePeople = parsePeople
+readJson.clearCache = clearCache
+
+var fs = require("graceful-fs")
+ , semver = require("semver")
+ , path = require("path")
+ , log = require("./log.js")
+ , npm = require("../npm.js")
+ , cache = {}
+ , timers = {}
+ , loadPackageDefaults = require("./load-package-defaults.js")
+
+function readJson (jsonFile, opts, cb) {
+ if (typeof cb !== "function") cb = opts, opts = {}
+ if (cache.hasOwnProperty(jsonFile)) {
+ log.verbose(jsonFile, "from cache")
+ return cb(null, cache[jsonFile])
+ }
+ opts.file = jsonFile
+ if (!opts.tag) {
+ var parsedPath = jsonFile.indexOf(npm.dir) === 0 && jsonFile.match(
+ /\/([^\/]+)\/([^\/]+)\/package\/package\.json$/)
+ if (parsedPath && semver.valid(parsedPath[2])) {
+ // this is a package.json in some installed package.
+ // infer the opts.tag so that linked packages behave right.
+ opts.tag = parsedPath[2]
+ }
+ }
+
+ var wscript = null
+ , contributors = null
+ , serverjs = null
+
+ if (opts.wscript != null) {
+ wscript = opts.wscript
+ next()
+ } else fs.readFile( path.join(path.dirname(jsonFile), "wscript")
+ , function (er, data) {
+ if (er) opts.wscript = false
+ else opts.wscript = !!(data.toString().match(/(^|\n)def build\b/)
+ && data.toString().match(/(^|\n)def configure\b/))
+ wscript = opts.wscript
+ next()
+ })
+
+ if (opts.contributors != null) {
+ contributors = opts.contributors
+ next()
+ } else fs.readFile( path.join(path.dirname(jsonFile), "AUTHORS")
+ , function (er, data) {
+ if (er) opts.contributors = false
+ else {
+ data = data.toString().split(/\r?\n/).map(function (l) {
+ l = l.trim().split("#").shift()
+ return l
+ }).filter(function (l) { return l })
+ opts.contributors = data
+ }
+ contributors = opts.contributors
+ next()
+ })
+
+ if (opts.serverjs != null) {
+ serverjs = opts.serverjs
+ next()
+ } else fs.stat( path.join(path.dirname(jsonFile), "server.js")
+ , function (er, st) {
+ if (er) opts.serverjs = false
+ else opts.serverjs = st.isFile()
+ serverjs = opts.serverjs
+ next()
+ })
+
+ function next () {
+ if (wscript === null
+ || contributors === null
+ || serverjs === null) {
+ return
+ }
+
+ fs.readFile(jsonFile, processJson(opts, function (er, data) {
+ if (er) return cb(er)
+ var doLoad = !(jsonFile.indexOf(npm.cache) === 0 &&
+ path.basename(path.dirname(jsonFile)) !== "package")
+ if (!doLoad) return cb(er, data)
+ loadPackageDefaults(data, path.dirname(jsonFile), cb)
+ }))
+ }
+}
+
+function processJson (opts, cb) {
+ if (typeof cb !== "function") cb = opts, opts = {}
+ if (typeof cb !== "function") {
+ var thing = cb, cb = null
+ return P(null, thing)
+ } else return P
+
+ function P (er, thing) {
+ if (er) {
+ if (cb) return cb(er, thing)
+ throw er
+ }
+ if (typeof thing === "object" && !Buffer.isBuffer(thing)) {
+ return processObject(opts, cb)(er, thing)
+ } else {
+ return processJsonString(opts, cb)(er, thing)
+ }
+ }
+}
+
+function processJsonString (opts, cb) { return function (er, jsonString) {
+ jsonString += ""
+ if (er) return cb(er, jsonString)
+ var json
+ try {
+ json = JSON.parse(jsonString)
+ } catch (ex) {
+ if (opts.file && opts.file.indexOf(npm.dir) === 0) {
+ try {
+ json = require("vm").runInNewContext("(\n"+jsonString+"\n)")
+ log.error(opts.file, "Error parsing json")
+ log.error(ex, "parse error ")
+ } catch (ex2) {
+ return jsonParseFail(ex, opts.file, cb)
+ }
+ } else {
+ return jsonParseFail(ex, opts.file, cb)
+ }
+ }
+ return processObject(opts, cb)(er, json)
+}}
+
+
+function jsonParseFail (ex, file, cb) {
+ var e = new Error(
+ "Failed to parse json\n"+ex.message)
+ e.errno = npm.EJSONPARSE
+ e.file = file
+ if (cb) return cb(e)
+ throw e
+}
+
+// a warning for deprecated or likely-incorrect fields
+var typoWarned = {}
+function typoWarn (json) {
+ if (typoWarned[json._id]) return
+ typoWarned[json._id] = true
+
+ if (json.modules) {
+ log.warn("package.json: 'modules' object is deprecated", json._id)
+ delete json.modules
+ }
+
+ // http://registry.npmjs.org/-/fields
+ var typos = { "dependancies": "dependencies"
+ , "dependecies": "dependencies"
+ , "depdenencies": "dependencies"
+ , "devEependencies": "devDependencies"
+ , "depends": "dependencies"
+ , "devDependences": "devDependencies"
+ , "devDepenencies": "devDependencies"
+ , "devdependencies": "devDependencies"
+ , "repostitory": "repository"
+ , "prefereGlobal": "preferGlobal"
+ , "hompage": "homepage"
+ , "hampage": "homepage" // XXX maybe not a typo, just delicious?
+ , "autohr": "author"
+ , "autor": "author"
+ , "contributers": "contributors"
+ , "publicationConfig": "publishConfig"
+ }
+
+ Object.keys(typos).forEach(function (d) {
+ if (json.hasOwnProperty(d)) {
+ log.warn( "package.json: '" + d + "' should probably be '"
+ + typos[d] + "'", json._id)
+ }
+ })
+
+ // bugs typos
+ var bugsTypos = { "web": "url"
+ , "name": "url"
+ }
+
+ if (typeof json.bugs === "object") {
+ Object.keys(bugsTypos).forEach(function (d) {
+ if (json.bugs.hasOwnProperty(d)) {
+ log.warn( "package.json: bugs['" + d + "'] should probably be "
+ + "bugs['" + bugsTypos[d] + "']", json._id)
+ }
+ })
+ }
+
+ // script typos
+ var scriptTypos = { "server": "start" }
+ if (json.scripts) Object.keys(scriptTypos).forEach(function (d) {
+ if (json.scripts.hasOwnProperty(d)) {
+ log.warn( "package.json: scripts['" + d + "'] should probably be "
+ + "scripts['" + scriptTypos[d] + "']", json._id)
+ }
+ })
+}
+
+
+function processObject (opts, cb) { return function (er, json) {
+ // json._npmJsonOpts = opts
+ // log.warn(json, "processing json")
+ if (npm.config.get("username")) {
+ json._npmUser = { name: npm.config.get("username")
+ , email: npm.config.get("email") }
+ }
+
+ // slashes would be a security risk.
+ // anything else will just fail harmlessly.
+ if (!json.name) {
+ var e = new Error("No 'name' field found in package.json")
+ if (cb) return cb(e)
+ throw e
+ }
+ json.name = json.name.trim()
+ if (json.name.charAt(0) === "." || json.name.match(/[\/@\s\+%:]/)) {
+ var msg = "Invalid name: "
+ + JSON.stringify(json.name)
+ + " may not start with '.' or contain %/@+: or whitespace"
+ , e = new Error(msg)
+ if (cb) return cb(e)
+ throw e
+ }
+ if (json.name.toLowerCase() === "node_modules") {
+ var msg = "Invalid package name: node_modules"
+ , e = new Error(msg)
+ if (cb) return cb(e)
+ throw e
+ }
+ if (json.name.toLowerCase() === "favicon.ico") {
+ var msg = "Sorry, favicon.ico is a picture, not a package."
+ , e = new Error(msg)
+ if (cb) return cb(e)
+ throw e
+ }
+
+ if (json.repostories) {
+ var msg = "'repositories' (plural) No longer supported.\n"
+ + "Please pick one, and put it in the 'repository' field."
+ , e = new Error(msg)
+ // uncomment once this is no longer an issue.
+ // if (cb) return cb(e)
+ // throw e
+ log.error(msg, "incorrect json: "+json.name)
+ json.repostory = json.repositories[0]
+ delete json.repositories
+ }
+
+ if (json.repository) {
+ if (typeof json.repository === "string") {
+ json.repository = { type : "git"
+ , url : json.repository }
+ }
+ var repo = json.repository.url || ""
+ repo = repo.replace(/^(https?|git):\/\/[^\@]+\@github.com/
+ ,'$1://github.com')
+ if (json.repository.type === "git"
+ && ( repo.match(/^https?:\/\/github.com/)
+ || repo.match(/github.com\/[^\/]+\/[^\/]+\/?$/)
+ && !repo.match(/\.git$/)
+ )) {
+ repo = repo.replace(/^https?:\/\/github.com/, 'git://github.com')
+ if (!repo.match(/\.git$/)) {
+ repo = repo.replace(/\/?$/, '.git')
+ }
+ }
+ if (repo.match(/github\.com\/[^\/]+\/[^\/]+\/?$/)
+ && repo.match(/\.git\.git$/)) {
+ log.warn(repo, "Probably broken git url")
+ }
+ json.repository.url = repo
+ }
+
+ var files = json.files
+ if (files && !Array.isArray(files)) {
+ log.warn(files, "Invalid 'files' member. See 'npm help json'")
+ delete json.files
+ }
+
+ var kw = json.keywords
+ if (typeof kw === "string") {
+ kw = kw.split(/,\s+/)
+ json.keywords = kw
+ }
+
+ json._id = json.name+"@"+json.version
+
+ var tag = opts.tag
+ if (tag) json.version = tag
+
+ var scripts = json.scripts || {}
+
+ // if it has a wscript, then build it.
+ if (opts.wscript && !json.prebuilt) {
+ log.verbose([json.prebuilt, opts], "has wscript")
+ if (!scripts.install && !scripts.preinstall) {
+ // don't fail if it was unexpected, just try.
+ scripts.preinstall = "node-waf clean || true; node-waf configure build"
+ json.scripts = scripts
+ }
+ }
+
+ // if it has an AUTHORS, then credit them
+ if (opts.contributors && Array.isArray(opts.contributors)
+ && opts.contributors.length) {
+ json.contributors = opts.contributors
+ }
+
+ // if it has a server.js, then start it.
+ if (opts.serverjs && !scripts.start) {
+ scripts.start = "node server.js"
+ json.scripts = scripts
+ }
+
+ if (!(semver.valid(json.version))) {
+ var m
+ if (!json.version) {
+ m = "'version' field missing\n"
+ } else {
+ m = "Invalid 'version' field: "+json.version+"\n"
+ }
+
+ m += "'version' Must be X.Y.Z, with an optional trailing tag.\n"
+ + "See the section on 'version' in `npm help json`"
+
+ var e = new Error(m)
+ if (cb) return cb(e)
+ throw e
+ }
+ json.version = semver.clean(json.version)
+
+ if (json.bin && typeof json.bin === "string") {
+ var b = {}
+ b[ json.name ] = json.bin
+ json.bin = b
+ }
+
+ if (json.bundledDependencies && !json.bundleDependencies) {
+ json.bundleDependencies = json.bundledDependencies
+ delete json.bundledDependencies
+ }
+
+ if (json.bundleDependencies && !Array.isArray(json.bundleDependencies)) {
+ var e = new Error("bundleDependencies must be an array.\n"
+ +"See `npm help json`")
+ if (cb) return cb(e)
+ throw e
+ }
+
+ if (json["dev-dependencies"] && !json.devDependencies) {
+ json.devDependencies = json["dev-dependencies"]
+ delete json["dev-dependencies"]
+ }
+
+ ;["dependencies", "devDependencies"].forEach(function (d) {
+ json[d] = json[d] ? depObjectify(json[d]) : {}
+ })
+
+ if (opts.dev || npm.config.get("dev") || npm.config.get("npat")) {
+ // log.warn(json._id, "Adding devdeps")
+ Object.keys(json.devDependencies || {}).forEach(function (d) {
+ json.dependencies[d] = json.devDependencies[d]
+ })
+ // log.warn(json.dependencies, "Added devdeps")
+ }
+
+ typoWarn(json)
+
+ json = testEngine(json)
+ json = parsePeople(unParsePeople(json))
+ if ( json.bugs ) json.bugs = parsePerson(unParsePerson(json.bugs))
+ json._npmVersion = npm.version
+ json._nodeVersion = process.version
+ if (opts.file) {
+ log.verbose(opts.file, "caching")
+ cache[opts.file] = json
+ // arbitrary
+ var keys = Object.keys(cache)
+ , l = keys.length
+ if (l > 10000) for (var i = 0; i < l - 5000; i ++) {
+ delete cache[keys[i]]
+ }
+ }
+ if (cb) cb(null,json)
+ return json
+}}
+
+function depObjectify (deps) {
+ if (!Array.isArray(deps)) return deps
+ var o = {}
+ deps.forEach(function (d) {
+ d = d.trim().split(/(:?[@\s><=])/)
+ o[d.shift()] = d.join("").trim().replace(/^@/, "")
+ })
+ return o
+}
+
+function testEngine (json) {
+ // if engines is empty, then assume that node is allowed.
+ if ( !json.engines
+ || Array.isArray(json.engines)
+ && !json.engines.length
+ || typeof json.engines === "object"
+ && !Object.keys(json.engines).length
+ ) {
+ json.engines = { "node" : "*" }
+ }
+ if (typeof json.engines === "string") {
+ if (semver.validRange(json.engines) !== null) {
+ json.engines = { "node" : json.engines }
+ } else json.engines = [ json.engines ]
+ }
+
+ var nodeVer = npm.config.get("node-version")
+ , ok = false
+ if (nodeVer) nodeVer = nodeVer.replace(/\+$/, '')
+ if (Array.isArray(json.engines)) {
+ // Packages/1.0 commonjs style, with an array.
+ // hack it to just hang a "node" member with the version range,
+ // then do the npm-style check below.
+ for (var i = 0, l = json.engines.length; i < l; i ++) {
+ var e = json.engines[i].trim()
+ if (e.substr(0, 4) === "node") {
+ json.engines.node = e.substr(4)
+ } else if (e.substr(0, 3) === "npm") {
+ json.engines.npm = e.substr(3)
+ }
+ }
+ }
+ if (json.engines.node === "") json.engines.node = "*"
+ if (json.engines.node && null === semver.validRange(json.engines.node)) {
+ log.warn( json.engines.node
+ , "Invalid range in engines.node. Please see `npm help json`" )
+ }
+
+ if (nodeVer) {
+ json._engineSupported = semver.satisfies( nodeVer
+ , json.engines.node || "null" )
+ }
+ if (json.engines.hasOwnProperty("npm") && json._engineSupported) {
+ json._engineSupported = semver.satisfies(npm.version, json.engines.npm)
+ }
+ return json
+}
+
+function unParsePeople (json) { return parsePeople(json, true) }
+
+function parsePeople (json, un) {
+ var fn = un ? unParsePerson : parsePerson
+ if (json.author) json.author = fn(json.author)
+ ;["maintainers", "contributors"].forEach(function (set) {
+ if (Array.isArray(json[set])) json[set] = json[set].map(fn)
+ })
+ return json
+}
+
+function unParsePerson (person) {
+ if (typeof person === "string") return person
+ var name = person.name || ""
+ , u = person.url || person.web
+ , url = u ? (" ("+u+")") : ""
+ , e = person.email || person.mail
+ , email = e ? (" <"+e+">") : ""
+ return name+email+url
+}
+
+function parsePerson (person) {
+ if (typeof person !== "string") return person
+ var name = person.match(/^([^\(<]+)/)
+ , url = person.match(/\(([^\)]+)\)/)
+ , email = person.match(/<([^>]+)>/)
+ , obj = {}
+ if (name && name[0].trim()) obj.name = name[0].trim()
+ if (email) obj.email = email[1]
+ if (url) obj.url = url[1]
+ return obj
+}
+
+function clearCache (prefix) {
+ if (!prefix) {
+ cache = {}
+ return
+ }
+ Object.keys(cache).forEach(function (c) {
+ if (c.indexOf(prefix) === 0) delete cache[c]
+ })
+}
diff --git a/deps/npm/lib/utils/relativize.js b/deps/npm/lib/utils/relativize.js
new file mode 100644
index 0000000000..74c45c5d3e
--- /dev/null
+++ b/deps/npm/lib/utils/relativize.js
@@ -0,0 +1,70 @@
+
+module.exports = relativize
+
+// return the shortest path between two folders.
+// if the original path is shorter, then use that,
+// unless forceRelative is set to true.
+var path = require("path")
// Compute the shortest spelling of `dest` relative to `src`.
// Returns "." when they resolve to the same path, false when src is
// absolute but dest is relative, and the original dest spelling when
// that is shorter (unless forceRelative is set).
function relativize (dest, src, forceRelative) {
  var orig = dest
  // a relative src always forces a relative answer; a relative dest
  // against an absolute src cannot be related, so fail.
  if (!isAbsolute(src)) forceRelative = true
  else if (!isAbsolute(dest)) return false
  src = path.resolve(src)
  dest = path.resolve(dest)
  if (src === dest) return "."
  // compare path components ("split" is "/" on unix, / or \ on windows)
  src = src.split(split)
  dest = dest.split(split)
  var i = 0
  while (src[i] === dest[i]) i++
  // i === 1: only the root component was shared, so keep the original
  // spelling unless a relative result was explicitly forced.
  if (!forceRelative && i === 1) return orig // nothing in common
  src.splice(0, i + 1)
  // splice the shared prefix out of dest, replacing it with one ".."
  // per directory hop remaining on the src side
  var dots = [0, i, "."]
  for (var i = 0, l = src.length; i < l; i ++) dots.push("..")
  dest.splice.apply(dest, dots)
  if (dest[0] === "." && dest[1] === "..") dest.shift()
  dest = dest.join("/")
  // prefer whichever spelling is shorter, unless relativity was forced
  return !forceRelative && orig.length < dest.length ? orig : dest
}
+
+var split = process.platform === "win32" ? /[\/\\]/ : "/"
+
+function isAbsolute (p) {
+ if (process.platform !== "win32") return p.charAt(0) === "/"
+ return path.resolve(p) === p
+}
+
// Self-test harness: runs only when this file is executed directly
// (`node relativize.js`), exercising the table of cases below.
if (module === require.main) {
  // from, to, result, relativeForced
  var assert = require("assert")

  ; [ ["/bar" ,"/foo" ,"/bar" ,"./bar" ]
    , ["/foo/baz" ,"/foo/bar/baz" ,"../baz" ,"../baz" ]
    , ["/a/d" ,"/a/b/c/d/e/f" ,"/a/d" ,"../../../../d" ]
    // trailing slashes are ignored.
    , ["/a/d" ,"/a/b/c/d/e/" ,"/a/d" ,"../../../d" ]
    , ["./foo/bar" ,"./foo/baz" ,"./bar" ,"./bar" ]
    // force relative when the src is relative.
    , ["./d" ,"./a/b/c/d/e" ,"../../../../d" ,"../../../../d" ]
    // if src is abs and dest is relative, then fail
    , ["./d" ,"/a/b" ,false ,false ]
  ].forEach(function (test) {
    var d = test[0]
      , s = test[1]
      , r = test[2]        // expected result, default mode
      , rr = test[3]       // expected result with forceRelative
      , ra = relativize(d, s)
      , rra = relativize(d, s, true)
    console.log([d, s, r, rr], [ra, rra], [r === ra, rr === rra])
    assert.equal(r, ra)
    assert.equal(rr, rra)
    if (!r) return
    // contract: this is the relative path from absolute A to absolute B
    var ad = path.resolve(d)
      , as = path.resolve(s)
      , dir = path.dirname(as)
    assert.equal(path.resolve(dir, rr), ad)
    assert.equal(path.resolve(dir, r), ad)
  })

  console.log("ok")
}
diff --git a/deps/npm/lib/utils/set.js b/deps/npm/lib/utils/set.js
new file mode 100644
index 0000000000..4d9241aab7
--- /dev/null
+++ b/deps/npm/lib/utils/set.js
@@ -0,0 +1,25 @@
+
+module.exports = set
+var get = require("./get.js")
+ , processJson = require("./read-json.js").processJson
// Case-insensitively set `key` on `obj`.  If `val` looks like a registry
// packument (it has a `versions` map), also register each individual
// "name@version" entry.  If `key` is itself a "name@version" id, ensure
// the parent package record exists and contains this version.
function set (obj, key, val) {
  // overwrite an existing key that differs only in case
  for (var i in obj) {
    if (i.toLowerCase() === key.toLowerCase()) return obj[i] = val
  }
  obj[key] = val
  if (!val) return
  // if it's a package set, then assign all the versions.
  if (val.versions) return Object.keys(val.versions).forEach(function (v) {
    if (typeof val.versions[v] !== "object") return
    set(obj, key+"@"+v, val.versions[v])
  })
  // Note that this doesn't put the dist-tags there, only updates the versions
  if (key === val.name+"@"+val.version) {
    processJson(val)
    var reg = get(obj, val.name) || {}
    reg.name = reg._id = val.name
    set(obj, val.name, reg)
    reg.versions = get(reg, "versions") || {}
    if (!get(reg.versions, val.version)) set(reg.versions, val.version, val)
  }
}
diff --git a/deps/npm/lib/utils/sha.js b/deps/npm/lib/utils/sha.js
new file mode 100644
index 0000000000..17b8c38f9c
--- /dev/null
+++ b/deps/npm/lib/utils/sha.js
@@ -0,0 +1,51 @@
+
+var fs = require("graceful-fs")
+ , crypto = require("crypto")
+ , log = require("./log.js")
+ , binding
+
+try { binding = process.binding("crypto") }
+catch (e) { binding = null }
+
+exports.check = check
+exports.get = get
+
// Verify that `file`'s sha1 matches the expected `sum` (case- and
// whitespace-insensitive).  Calls back with an Error on mismatch,
// nothing on success.  Best-effort: when the crypto binding is missing,
// the check is skipped with a warning rather than failing the install.
function check (file, sum, cb) {
  if (!binding) {
    log.warn("crypto binding not found. Cannot verify shasum.", "shasum")
    return cb()
  }
  get(file, function (er, actual) {
    if (er) return log.er(cb, "Error getting shasum")(er)
    var expected = sum.toLowerCase().trim()
      , ok = actual === expected
    cb(ok ? null : new Error(
      "shasum check failed for "+file+"\n"
      +"Expected: "+expected+"\n"
      +"Actual:   "+actual))
  })
}
+
// Stream `file` through a sha1 hash and call back (null, hexDigest),
// where hexDigest is lowercase hex.  Read errors call back (er) exactly
// once (errState guards against duplicate callbacks from the stream).
// Best-effort when the crypto binding is missing: warns and calls back
// with no digest.
function get (file, cb) {
  if (!binding) {
    log.warn("crypto binding not found. Cannot verify shasum.", "shasum")
    return cb()
  }
  var h = crypto.createHash("sha1")
    , s = fs.createReadStream(file)
    , errState = null
  s.on("error", function (er) {
    if (errState) return
    log.silly(er.stack || er.message, "sha error")
    return cb(errState = er)
  }).on("data", function (chunk) {
    if (errState) return
    log.silly(chunk.length, "updated sha bytes")
    h.update(chunk)
  }).on("end", function () {
    if (errState) return
    var actual = h.digest("hex").toLowerCase().trim()
    log(actual+"\n"+file, "shasum")
    cb(null, actual)
  })
}
diff --git a/deps/npm/lib/utils/tar.js b/deps/npm/lib/utils/tar.js
new file mode 100644
index 0000000000..d6db6ea528
--- /dev/null
+++ b/deps/npm/lib/utils/tar.js
@@ -0,0 +1,551 @@
+// XXX lib/cache.js and this file need to be rewritten.
+
+// commands for packing and unpacking tarballs
+// this file is used by lib/cache.js
+
+var npm = require("../npm.js")
+ , fs = require("graceful-fs")
+ , exec = require("./exec.js")
+ , find = require("./find.js")
+ , mkdir = require("./mkdir-p.js")
+ , asyncMap = require("slide").asyncMap
+ , path = require("path")
+ , log = require("./log.js")
+ , uidNumber = require("./uid-number.js")
+ , rm = require("rimraf")
+ , readJson = require("./read-json.js")
+ , relativize = require("./relativize.js")
+ , cache = require("../cache.js")
+ , excludes = require("./excludes.js")
+ , myUid = process.getuid && process.getuid()
+ , myGid = process.getgid && process.getgid()
+ , tar = require("tar")
+ , zlib = require("zlib")
+ , fstream = require("fstream")
+
+exports.pack = pack
+exports.unpack = unpack
+exports.makeList = makeList
+
// Pack `folder` into a gzipped tarball at `targetTarball`.
// pkg is the folder's parsed package.json (read from disk when a
// callback is passed in its place); dfc ("do fancy crap") controls
// node_modules/bundleDependencies handling and defaults to true.
// Any symlink-resolution cleanup produced by makeList is undone even
// when packing fails.
function pack (targetTarball, folder, pkg, dfc, cb) {
  if (typeof cb !== "function") cb = dfc, dfc = true
  folder = path.resolve(process.cwd(), folder)
  if (typeof pkg === "function") {
    // pkg omitted: read it from the folder, then re-enter
    cb = pkg, pkg = null
    return readJson(path.resolve(folder, "package.json"), function (er, pkg) {
      if (er) return log.er(cb, "Couldn't find package.json in "+folder)(er)
      pack(targetTarball, folder, pkg, dfc, cb)
    })
  }
  log.verbose(folder+" "+targetTarball, "pack")
  var parent = path.dirname(folder)

  log.silly(folder, "makeList")
  makeList(folder, pkg, dfc, function (er, files, cleanup) {
    if (er) return cb(er)
    return packFiles(targetTarball, parent, files, pkg, function (er) {
      if (!cleanup || !cleanup.length) return cb(er)
      // try to be a good citizen, even/especially in the event of failure.
      cleanupResolveLinkDep(cleanup, function (er2) {
        if (er || er2) {
          if (er) log(er, "packing tarball")
          if (er2) log(er2, "while cleaning up resolved deps")
        }
        return cb(er || er2)
      })
    })
  })
}
+
// Stream a gzipped tarball of `files` (paths relative to `parent`) into
// `targetTarball` using fstream + tar + zlib.
function packFiles (targetTarball, parent, files, pkg, cb) {

  var p

  files = files.map(function (f) {
    // NOTE(review): `p` ends up as the first path component of the *last*
    // file in the list; re-rooting `parent` on it assumes every entry
    // shares that top-level folder name — confirm against makeList output.
    p = f.split(/\/|\\/)[0]
    return path.resolve(parent, f)
  })

  parent = path.resolve(parent, p)

  log.verbose(targetTarball, "tarball")
  log.verbose(parent, "parent")
  fstream.Reader({ type: "Directory"
                 , path: parent
                 , filter: function () {
                     // include only the explicitly listed files
                     return -1 !== files.indexOf(this.path)
                     // || (this.type === "Directory" &&
                     //     this.basename !== ".git")

                   }
                 })
    .on("error", log.er(cb, "error reading "+parent))
    .on("entry", function E (entry) {
      // recurse: directory entries emit their own child entries
      entry.on("entry", E)
    })
    .pipe(tar.Pack({}))
    .on("error", log.er(cb, "tar creation error "+targetTarball))
    .pipe(zlib.Gzip())
    .on("error", log.er(cb, "gzip error "+targetTarball))
    .pipe(fstream.Writer({ type: "File", path: targetTarball }))
    .on("error", log.er(cb, "Could not write "+targetTarball))
    .on("close", cb)
}
+
+
// Public unpack entry point.  dMode/fMode/uid/gid are all optional (the
// callback may appear in any of their positions); missing modes default
// to npm's configured dir/file modes.  uid/gid names are resolved to
// numbers before handing off to unpack_.
function unpack (tarball, unpackTarget, dMode, fMode, uid, gid, cb) {
  if (typeof cb !== "function") cb = gid, gid = null
  if (typeof cb !== "function") cb = uid, uid = null
  if (typeof cb !== "function") cb = fMode, fMode = npm.modes.file
  if (typeof cb !== "function") cb = dMode, dMode = npm.modes.exec

  uidNumber(uid, gid, function (er, uid, gid) {
    if (er) return cb(er)
    unpack_(tarball, unpackTarget, dMode, fMode, uid, gid, cb)
  })
}
+
// Actually unpack: extract into a "___<name>.npm" temp sibling of the
// target, remove whatever currently occupies the target, rename the
// extracted folder into place, and clean up the temp dir.  Calls back
// with the unpacked package.json data.
function unpack_ ( tarball, unpackTarget, dMode, fMode, uid, gid, cb ) {
  // If the desired target is /path/to/foo,
  // then unpack into /path/to/.foo.npm/{something}
  // rename that to /path/to/foo, and delete /path/to/.foo.npm
  var parent = path.dirname(unpackTarget)
    , base = path.basename(unpackTarget)
    , tmp = path.resolve(parent, "___" + base + ".npm")

  mkdir(tmp, dMode || npm.modes.exec, uid, gid, function (er) {
    log.verbose([uid, gid], "unpack_ uid, gid")
    log.verbose(unpackTarget, "unpackTarget")
    if (er) return log.er(cb, "Could not create "+tmp)(er)
    // cp the gzip of the tarball, pipe the stdout into tar's stdin
    // gzip {tarball} --decompress --stdout \
    //   | tar -mvxpf - --strip-components=1 -C {unpackTarget}
    gunzTarPerm( tarball, tmp
               , dMode, fMode
               , uid, gid
               , function (er, folder) {
      if (er) return cb(er)
      log.verbose(folder, "gunzed")

      rm(unpackTarget, function (er) {
        if (er) return cb(er)
        log.verbose(unpackTarget, "rm'ed")
        fs.rename(folder, unpackTarget, function (er) {
          if (er) return cb(er)
          log.verbose([folder, unpackTarget], "renamed")
          // curse you, nfs!  It will lie and tell you that the
          // mv is done, when in fact, it isn't.  In theory,
          // reading the file should cause it to wait until it's done.
          readJson( path.resolve(unpackTarget, "package.json")
                  , function (er, data) {
            // now we read the json, so we know it's there.
            rm(tmp, function (er2) { cb(er || er2, data) })
          })
        })
      })
    })
  })
}
+
// Gunzip + untar `tarball` into `tmp`, then walk the extracted tree
// fixing ownership (chown to uid/gid when both are numbers) and
// permissions (OR dMode/fMode over the packed modes, minus npm's umask).
// Calls back with the single top-level folder that was extracted.
function gunzTarPerm (tarball, tmp, dMode, fMode, uid, gid, cb) {
  if (!dMode) dMode = npm.modes.exec
  if (!fMode) fMode = npm.modes.file
  log.silly([dMode.toString(8), fMode.toString(8)], "gunzTarPerm modes")

  fs.createReadStream(tarball)
    .pipe(zlib.Unzip())
    .on("error", log.er(cb, "unzip error"))
    .pipe(tar.Extract({ type: "Directory", path: tmp }))
    .on("error", log.er(cb, "Failed unpacking "+tarball))
    .on("close", afterUntar)

  //
  // XXX Do all this in an Extract filter.
  //
  function afterUntar (er) {
    // if we're not doing ownership management,
    // then we're done now.
    if (er) return log.er(cb, "Failed unpacking "+tarball)(er)

    // HACK skip on windows
    if (npm.config.get("unsafe-perm") && process.platform !== "win32") {
      // in unsafe-perm mode, chown to the invoking (possibly sudo'ing) user
      uid = process.getuid()
      gid = process.getgid()
      if (uid === 0) {
        if (process.env.SUDO_UID) uid = +process.env.SUDO_UID
        if (process.env.SUDO_GID) gid = +process.env.SUDO_GID
      }
    }

    if (process.platform === "win32") {
      // no ownership management on windows: just report the first entry
      return fs.readdir(tmp, function (er, files) {
        files = files.filter(function (f) {
          return f && f.indexOf("\0") === -1
        })
        cb(er, files && path.resolve(tmp, files[0]))
      })
    }

    find(tmp, function (f) {
      return f !== tmp
    }, function (er, files) {
      if (er) return cb(er)
      asyncMap(files, function (f, cb) {
        f = path.resolve(f)
        log.silly(f, "asyncMap in gTP")
        fs.lstat(f, function (er, stat) {

          // symlinks keep their own ownership/modes
          if (er || stat.isSymbolicLink()) return cb(er)
          if (typeof uid === "number" && typeof gid === "number") {
            fs.chown(f, uid, gid, chown)
          } else chown()

          function chown (er) {
            if (er) return cb(er)
            var mode = stat.isDirectory() ? dMode : fMode
              , oldMode = stat.mode & 0777
              , newMode = (oldMode | mode) & (~npm.modes.umask)
            if (mode && newMode !== oldMode) {
              log.silly(newMode.toString(8), "chmod "+path.basename(f))
              fs.chmod(f, newMode, cb)
            } else cb()
          }
        })
      }, function (er) {

        if (er) return cb(er)
        // finally give the temp dir itself back to the npm process owner
        if (typeof myUid === "number" && typeof myGid === "number") {
          fs.chown(tmp, myUid, myGid, chown)
        } else chown()

        function chown (er) {
          if (er) return cb(er)
          fs.readdir(tmp, function (er, folder) {
            // skip OSX resource-fork files ("._*")
            folder = folder && folder.filter(function (f) {
              return f && !f.match(/^\._/)
            })
            cb(er, folder && path.resolve(tmp, folder[0]))
          })
        }
      })
    })
  }
}
+
// Top-level entry for building the list of files to pack from `dir`.
// Gathers the config-level ignore rules (the "ignore" setting plus the
// user and global ignore files), delegates the walk to makeList_, and
// rebases the resulting paths onto the package folder name.
function makeList (dir, pkg, dfc, cb) {
  if (typeof cb !== "function") cb = dfc, dfc = true
  if (typeof cb !== "function") cb = pkg, pkg = null
  dir = path.resolve(dir)

  if (!pkg.path) pkg.path = dir

  var name = path.basename(dir)

  // since this is a top-level traversal, get the user and global
  // exclude files, as well as the "ignore" config setting.
  var confIgnore = npm.config.get("ignore").trim()
        .split(/[\n\r\s\t]+/)
        .filter(function (i) { return i.trim() })
    , userIgnore = npm.config.get("userignorefile")
    , globalIgnore = npm.config.get("globalignorefile")
    , userExclude
    , globalExclude

  confIgnore.dir = dir
  confIgnore.name = "confIgnore"

  var defIgnore = ["build/"]
  defIgnore.dir = dir

  // TODO: only look these up once, and cache outside this function
  excludes.parseIgnoreFile( userIgnore, null, dir
                          , function (er, uex) {
    if (er) return cb(er)
    userExclude = uex
    next()
  })

  excludes.parseIgnoreFile( globalIgnore, null, dir
                          , function (er, gex) {
    if (er) return cb(er)
    globalExclude = gex
    next()
  })

  function next () {
    // both ignore-file loads must have completed before walking
    if (!globalExclude || !userExclude) return
    var exList = [ defIgnore, confIgnore, globalExclude, userExclude ]

    makeList_(dir, pkg, exList, dfc, function (er, files, cleanup) {
      if (er) return cb(er)
      var dirLen = dir.length + 1
      files = files.map(function (file) {
        // rebase "/abs/path/to/pkg/foo" -> "pkgname/foo"
        return path.join(name, file.substr(dirLen))
      })
      return cb(null, files, cleanup)
    })
  }
}
+
+// Patterns ending in slashes will only match targets
+// ending in slashes. To implement this, add a / to
+// the filename iff it lstats isDirectory()
// List the entries of `dir` for the packing walk.  Directories get a
// trailing "/" appended (so dir-only ignore patterns can match them),
// symlinks go through the GH-691 resolution dance, unstat-able entries
// are silently dropped.
function readDir (dir, pkg, dfc, cb) {
  fs.readdir(dir, function (er, files) {
    if (er) return cb(er)
    files = files.filter(function (f) {
      // drop empty, absolute, or NUL-containing names
      return f && f.charAt(0) !== "/" && f.indexOf("\0") === -1
    })
    asyncMap(files, function (file, cb) {
      fs.lstat(path.resolve(dir, file), function (er, st) {
        if (er) return cb(null, [])
        // if it's a directory, then tack "/" onto the name
        // so that it can match dir-only patterns in the
        // include/exclude logic later.
        if (st.isDirectory()) return cb(null, file + "/")

        // if it's a symlink, then we need to do some more
        // complex stuff for GH-691
        if (st.isSymbolicLink()) return readSymlink(dir, file, pkg, dfc, cb)

        // otherwise, just let it on through.
        return cb(null, file)
      })
    }, cb)
  })
}
+
+// just see where this link is pointing, and resolve relative paths.
// Resolve exactly one level of symlink: read the link's target and
// resolve it against the link's own directory.  Calls back with
// (null, absoluteTarget, rawTarget); does not recurse into nested links.
function shallowReal (link, cb) {
  var abs = path.resolve(link)
  fs.readlink(abs, function (er, target) {
    if (er) return cb(er)
    cb(null, path.resolve(path.dirname(abs), target), target)
  })
}
+
// Decide what to do with a symlink found during the packing walk
// (GH-691).  Links pointing inside the package pass through; external
// links are dropped unless they are node_modules deps, in which case
// they are resolved, cached, and materialized via resolveLinkDep.
function readSymlink (dir, file, pkg, dfc, cb) {
  // only treat as a dep when we're directly inside the package's
  // own node_modules folder and fancy dep handling is on
  var isNM = dfc
           && path.basename(dir) === "node_modules"
           && path.dirname(dir) === pkg.path
  // see if this thing is pointing outside of the package.
  // external symlinks are resolved for deps, ignored for other things.
  // internal symlinks are allowed through.
  var df = path.resolve(dir, file)
  shallowReal(df, function (er, r, target) {
    if (er) return cb(null, []) // wtf? exclude file.
    if (r.indexOf(dir) === 0) return cb(null, file) // internal
    if (!isNM) return cb(null, []) // external non-dep
    // now the fun stuff!
    fs.realpath(df, function (er, resolved) {
      if (er) return cb(null, []) // can't add it.
      readJson(path.resolve(resolved, "package.json"), function (er) {
        if (er) return cb(null, []) // not a package
        resolveLinkDep(dir, file, resolved, target, pkg, function (er, f, c) {
          cb(er, f, c)
        })
      })
    })
  })
}
+
+// put the link back the way it was.
// Undo resolveLinkDep: for each [target, linkPath, ...] record, remove
// whatever was materialized at linkPath and restore the original symlink.
function cleanupResolveLinkDep (cleanup, cb) {
  // cut it out of the list, so that cycles will be broken.
  if (!cleanup) return cb()

  asyncMap(cleanup, function (d, cb) {
    rm(d[1], function (er) {
      if (er) return cb(er)
      fs.symlink(d[0], d[1], cb)
    })
  }, cb)
}
+
// Materialize an external symlinked dependency so it can be bundled:
// add the resolved folder to the cache, unpack a real copy where the
// link was, and return a cleanup record so the link can be restored
// afterwards.  Deps not on bundleDependencies just get the link removed.
function resolveLinkDep (dir, file, resolved, target, pkg, cb) {
  // we've already decided that this is a dep that will be bundled.
  // make sure the data reflects this.
  var bd = pkg.bundleDependencies || pkg.bundledDependencies || []
  delete pkg.bundledDependencies
  pkg.bundleDependencies = bd
  var f = path.resolve(dir, file)
    , cleanup = [[target, f, resolved]]

  if (bd.indexOf(file) === -1) {
    // then we don't do this one.
    // just move the symlink out of the way.
    return rm(f, function (er) {
      cb(er, file, cleanup)
    })
  }

  rm(f, function (er) {
    if (er) return cb(er)
    cache.add(resolved, function (er, data) {
      if (er) return cb(er)
      cache.unpack(data.name, data.version, f, function (er, data) {
        if (er) return cb(er)
        // now clear out the cache entry, since it's weird, probably.
        // pass the cleanup object along so that the thing getting the
        // list of files knows what to clean up afterwards.
        cache.clean([data._id], function (er) { cb(er, file, cleanup) })
      })
    })
  })
}
+
+// exList is a list of ignore lists.
+// Each exList item is an array of patterns of files to ignore
+//
// Recursive worker behind makeList.  Reads `dir`, strips hard-excluded
// garbage, layers on any local .npmignore/.gitignore, applies the
// exclude lists, recurses into subdirectories, and collects symlink
// cleanup records.  A nested package.json restarts the include/exclude
// logic from scratch for that subtree.  The `next` counter waits for
// the two async preparation branches before filtering.
function makeList_ (dir, pkg, exList, dfc, cb) {
  var files = null
    , cleanup = null

  readDir(dir, pkg, dfc, function (er, f, c) {
    if (er) return cb(er)
    cleanup = c
    files = f.map(function (f) {
      // no nulls in paths!
      return f.split(/\0/)[0]
    }).filter(function (f) {
      // always remove all source control folders and
      // waf/vim/OSX garbage.  this is a firm requirement.
      return !( f === ".git/"
             || f === ".lock-wscript"
             || f === "CVS/"
             || f === ".svn/"
             || f === ".hg/"
             || f.match(/^\..*\.swp/)
             || f === ".DS_Store"
             || f.match(/^\._/)
             || f === "npm-debug.log"
             || f === ""
             || f.charAt(0) === "/"
              )
    })

    // if (files.length > 0) files.push(".")

    if (files.indexOf("package.json") !== -1 && dir !== pkg.path) {
      // a package.json file starts the whole exclude/include
      // logic all over.  Otherwise, a parent could break its
      // deps with its files list or .npmignore file.
      readJson(path.resolve(dir, "package.json"), function (er, data) {
        if (!er && typeof data === "object") {
          data.path = dir
          return makeList(dir, data, dfc, function (er, files) {
            // these need to be mounted onto the directory now.
            cb(er, files && files.map(function (f) {
              return path.resolve(path.dirname(dir), f)
            }))
          })
        }
        next()
      })
      //next()
    } else next()

    // add a local ignore file, if found.
    if (files.indexOf(".npmignore") === -1
        && files.indexOf(".gitignore") === -1) next()
    else {
      excludes.addIgnoreFile( path.resolve(dir, ".npmignore")
                            , ".gitignore"
                            , exList
                            , dir
                            , function (er, list) {
        if (!er) exList = list
        next(er)
      })
    }
  })

  // rendezvous: both branches above must call next() before the
  // filtering and recursion below runs
  var n = 2
    , errState = null
  function next (er) {
    if (errState) return
    if (er) return cb(errState = er, [], cleanup)
    if (-- n > 0) return

    if (!pkg) return cb(new Error("No package.json file in "+dir))
    if (pkg.path === dir && pkg.files) {
      // sanitize the "files" list: trim and drop comment lines
      pkg.files = pkg.files.filter(function (f) {
        f = f.trim()
        return f && f.charAt(0) !== "#"
      })
      if (!pkg.files.length) pkg.files = null
    }
    if (pkg.path === dir && pkg.files) {
      // stuff on the files list MUST be there.
      // ignore everything, then include the stuff on the files list.
      var pkgFiles = ["*"].concat(pkg.files.map(function (f) {
        return "!" + f
      }))
      pkgFiles.dir = dir
      pkgFiles.packageFiles = true
      exList.push(pkgFiles)
    }

    if (path.basename(dir) === "node_modules"
        && pkg.path === path.dirname(dir)
        && dfc) { // do fancy crap
      files = filterNodeModules(files, pkg)
    } else {
      // If a directory is excluded, we still need to be
      // able to *include* a file within it, and have that override
      // the prior exclusion.
      //
      // This whole makeList thing probably needs to be rewritten
      files = files.filter(function (f) {
        return excludes.filter(dir, exList)(f) || f.slice(-1) === "/"
      })
    }


    asyncMap(files, function (file, cb) {
      // if this is a dir, then dive into it.
      // otherwise, don't.
      file = path.resolve(dir, file)

      // in 0.6.0, fs.readdir can produce some really odd results.
      // XXX: remove this and make the engines hash exclude 0.6.0
      if (file.indexOf(dir) !== 0) {
        return cb(null, [])
      }

      fs.lstat(file, function (er, st) {
        if (er) return cb(er)
        if (st.isDirectory()) {
          return makeList_(file, pkg, exList, dfc, cb)
        }
        return cb(null, file)
      })
    }, function (er, files, c) {
      if (c) cleanup = (cleanup || []).concat(c)
      if (files.length > 0) files.push(dir)
      return cb(er, files, cleanup)
    })
  }
}
+
// Include only those node_modules folders that appear on the package's
// bundleDependencies (a.k.a. bundledDependencies) list.
// NOTE(review): the dependencies/devDependencies lists are not consulted
// by the code below, despite what earlier comments implied.
// Filter the entries of a node_modules folder down to the deps that
// should be bundled into the tarball: only names on the package's
// bundleDependencies list, never dot- or underscore-prefixed entries.
// Side effect: normalizes pkg.bundledDependencies -> pkg.bundleDependencies.
// (A `deps` list of dependencies/devDependencies used to be computed
// here but was never consulted; that dead code has been removed.)
function filterNodeModules (files, pkg) {
  var bd = pkg.bundleDependencies || pkg.bundledDependencies || []

  delete pkg.bundledDependencies
  pkg.bundleDependencies = bd

  return files.filter(function (f) {
    // entries carry a trailing "/" from readDir; compare the bare name
    f = f.replace(/\/$/, "")
    return f.charAt(0) !== "."
        && f.charAt(0) !== "_"
        && bd.indexOf(f) !== -1
  })
}
diff --git a/deps/npm/lib/utils/uid-number.js b/deps/npm/lib/utils/uid-number.js
new file mode 100644
index 0000000000..3756275534
--- /dev/null
+++ b/deps/npm/lib/utils/uid-number.js
@@ -0,0 +1,55 @@
+module.exports = uidNumber
+
+// This module calls into bin/npm-get-uid-gid.js, which sets the
+// uid and gid to the supplied argument, in order to find out their
+// numeric value. This can't be done in the main node process,
+// because otherwise npm would be running as that user.
+
+var exec = require("./exec.js")
+ , path = require("path")
+ , log = require("./log.js")
+ , constants = require("constants")
+ , npm = require("../npm.js")
+ , uidSupport = process.getuid && process.setuid
+ , uidCache = {}
+ , gidCache = {}
+
// Resolve the numeric uid/gid for the given user/group (names or
// numbers; both optional, defaulting to the current process ids).
// Non-numeric names are resolved by spawning bin/npm-get-uid-gid.js in a
// child process, because calling setuid/setgid in-process would change
// who npm itself runs as.  Results are memoized in uidCache/gidCache.
// Calls back (er) on failure, (null, uidNumber, gidNumber) on success,
// or with nothing at all when uid support is absent or unsafe-perm is set.
function uidNumber (uid, gid, cb) {
  if (!uidSupport || npm.config.get("unsafe-perm")) return cb()
  if (typeof cb !== "function") cb = gid, gid = null
  if (typeof cb !== "function") cb = uid, uid = null
  if (gid == null) gid = process.getgid()
  if (uid == null) uid = process.getuid()
  if (!isNaN(gid)) gid = +gid
  if (!isNaN(uid)) uid = +uid

  if (uidCache[uid]) uid = uidCache[uid]
  if (gidCache[gid]) gid = gidCache[gid]

  // both already numeric (or cached): nothing to look up
  if (typeof gid === "number" && typeof uid === "number") {
    return cb(null, uid, gid)
  }

  var getter = path.join(__dirname, "..", "..", "bin", "npm-get-uid-gid.js")
  return exec( process.execPath, [getter, uid, gid], process.env, false
             , null, process.getuid(), process.getgid()
             , function (er, code, out, err) {
    if (er) return log.er(cb, "Could not get uid/gid "+err)(er)
    log.silly(out, "output from getuid/gid")
    out = JSON.parse(out+"")
    if (out.error) {
      if (!npm.config.get("unsafe-perm")) {
        // renamed from `er` so we don't shadow the callback's parameter
        var lookupEr = new Error(out.error)
        lookupEr.errno = out.errno
        return cb(lookupEr)
      } else {
        return cb(null, +process.getuid(), +process.getgid())
      }
    }
    if (isNaN(out.uid) || isNaN(out.gid)) return cb(new Error(
      "Could not get uid/gid: "+JSON.stringify(out)))
    uidCache[uid] = out.uid
    // BUGFIX: this wrote to uidCache[gid] before, so group lookups were
    // never cached and polluted the uid cache with gid entries.
    gidCache[gid] = out.gid
    cb(null, out.uid, out.gid)
  })
}
diff --git a/deps/npm/lib/version.js b/deps/npm/lib/version.js
new file mode 100644
index 0000000000..e626e474a3
--- /dev/null
+++ b/deps/npm/lib/version.js
@@ -0,0 +1,66 @@
+// npm version <newver>
+
+module.exports = version
+
+var exec = require("./utils/exec.js")
+ , readJson = require("./utils/read-json.js")
+ , semver = require("semver")
+ , path = require("path")
+ , fs = require("graceful-fs")
+ , chain = require("slide").chain
+ , log = require("./utils/log.js")
+ , npm = require("./npm.js")
+
+version.usage = "npm version <newversion> [--message commit-message]"
+ + "\n(run in package dir)\n"
+ + "'npm -v' or 'npm --version' to print npm version "
+ + "("+npm.version+")\n"
+ + "'npm view <pkg> version' to view a package's "
+ + "published version\n"
+ + "'npm ls' to inspect current package/dependency versions"
+
// npm version <newver|increment>: rewrite package.json's version to the
// given semver (or an incremented one), stripping npm's private "_" keys
// and re-stringifying people fields.  If the cwd is a git checkout, the
// change is also committed and tagged (see checkGit).
function version (args, cb) {
  if (args.length !== 1) return cb(version.usage)
  readJson(path.join(process.cwd(), "package.json"), function (er, data) {
    if (er) return log.er(cb, "No package.json found")(er)
    // accept either an explicit version or an increment keyword
    var newVer = semver.valid(args[0])
    if (!newVer) newVer = semver.inc(data.version, args[0])
    if (!newVer) return cb(version.usage)
    if (data.version === newVer) return cb(new Error("Version not changed"))
    data.version = newVer
    // drop npm's computed "_" fields before writing back to disk
    Object.keys(data).forEach(function (k) {
      if (k.charAt(0) === "_") delete data[k]
    })
    readJson.unParsePeople(data)
    fs.stat(path.join(process.cwd(), ".git"), function (er, s) {
      var doGit = !er && s.isDirectory()
      if (!doGit) return write(data, cb)
      else checkGit(data, cb)
    })
  })
}
// Refuse to bump the version in a dirty git working directory (untracked
// "??" files are tolerated); otherwise write package.json, commit it
// with the configured message ("%s" -> new version), and tag "v<version>".
// NOTE(review): the exec error is not checked before using stdout —
// if git itself fails this will throw on stdout.trim(); confirm intended.
function checkGit (data, cb) {
  exec( "git", ["status", "--porcelain"], process.env, false
      , function (er, code, stdout, stderr) {
    var lines = stdout.trim().split("\n").filter(function (line) {
      return line.trim() && !line.match(/^\?\? /)
    })
    if (lines.length) return cb(new Error(
      "Git working directory not clean.\n"+lines.join("\n")))
    write(data, function (er) {
      if (er) return cb(er)
      var message = npm.config.get("message").replace(/%s/g, data.version)
      chain
        ( [ [ exec, "git", ["add","package.json"], process.env, false ]
          , [ exec, "git", ["commit", "-m", message ]
            , process.env, false ]
          , [ exec, "git", ["tag", "v"+data.version], process.env, false ] ]
        , cb )
    })
  })
}
// Serialize `data` as 2-space-indented JSON and overwrite the
// package.json in the current working directory.
function write (data, cb) {
  var target = path.join(process.cwd(), "package.json")
  var body = JSON.stringify(data, null, 2)
  fs.writeFile(target, new Buffer(body), cb)
}
diff --git a/deps/npm/lib/view.js b/deps/npm/lib/view.js
new file mode 100644
index 0000000000..3e39f76de9
--- /dev/null
+++ b/deps/npm/lib/view.js
@@ -0,0 +1,244 @@
+// npm view [pkg [pkg ...]]
+
+module.exports = view
+view.usage = "npm view pkg[@version] [<field>[.subfield]...]"
+
// Tab-completion for npm view: complete package names from the registry
// short list, then (once a package is chosen) complete field paths by
// flattening the keys of its registry doc and its tagged version.
view.completion = function (opts, cb) {
  if (opts.conf.argv.remain.length <= 2) {
    return registry.get("/-/short", cb)
  }
  // have the package, get the fields.
  var tag = npm.config.get("tag")
  registry.get(opts.conf.argv.remain[2], function (er, d) {
    if (er) return cb(er)
    var dv = d.versions[d["dist-tags"][tag]]
      , fields = []
    d.versions = Object.keys(d.versions).sort(semver.compare)
    fields = getFields(d).concat(getFields(dv))
    cb(null, fields)
  })

  // Recursively collect dotted field paths ("a.b", "a[0]") from `d`,
  // skipping private "_" keys and keys that themselves contain dots.
  function getFields (d, f, pref) {
    f = f || []
    if (!d) return f
    pref = pref || []
    Object.keys(d).forEach(function (k) {
      if (k.charAt(0) === "_" || k.indexOf(".") !== -1) return
      var p = pref.concat(k).join(".")
      f.push(p)
      if (Array.isArray(d[k])) {
        // arrays contribute indexed paths for scalars, nested paths for objects
        return d[k].forEach(function (val, i) {
          var pi = p + "[" + i + "]"
          if (val && typeof val === "object") getFields(val, f, [p])
          else f.push(pi)
        })
      }
      if (typeof d[k] === "object") getFields(d[k], f, [p])
    })
    return f
  }
}
+
+var registry = require("./utils/npm-registry-client/index.js")
+ , ini = require("ini")
+ , log = require("./utils/log.js")
+ , util = require("util")
+ , output
+ , npm = require("./npm.js")
+ , semver = require("semver")
+ , readJson = require("./utils/read-json.js")
+
// npm view <pkg>[@version] [field...]: fetch the package's registry doc,
// show the requested fields for every version matching the version/tag,
// and print (unless `silent`).  Calls back with the collected results.
function view (args, silent, cb) {
  if (typeof cb !== "function") cb = silent, silent = false
  if (!args.length) return cb("Usage: "+view.usage)
  var pkg = args.shift()
    , nv = pkg.split("@")
    , name = nv.shift()
    , version = nv.join("@") || npm.config.get("tag")

  if (name === ".") return cb(view.usage)

  // get the data about this package
  registry.get(name, null, 600, function (er, data) {
    if (er) return cb(er)
    // a dist-tag (e.g. "latest") resolves to its version
    if (data["dist-tags"].hasOwnProperty(version)) {
      version = data["dist-tags"][version]
    }
    // NOTE(review): `error` is declared but never assigned below, so the
    // error branch of the final callback is effectively dead — confirm.
    var results = []
      , error = null
      , versions = data.versions
    data.versions = Object.keys(data.versions).sort(semver.compare)
    if (!args.length) args = [""]

    if (-1 === args.indexOf("readme")) {
      delete data.readme
    }

    Object.keys(versions).forEach(function (v) {
      try {
        versions[v] = readJson.processJson(versions[v])
      } catch (ex) {
        delete versions[v]
      }
      // NOTE(review): the inner parameter shadows the outer `args` array
      // with a single field string — works, but easy to misread.
      if (semver.satisfies(v, version)) args.forEach(function (args) {
        results.push(showFields(data, versions[v], args))
      })
    })
    results = results.reduce(reducer, {})
    var retval = results

    if (args.length === 1 && args[0] === "") {
      retval = cleanBlanks(retval)
      log.silly(retval, "cleanup")
    }

    if (error || silent) cb(error, retval)
    else printData(results, data._id, cb.bind(null, error, retval))
  })
}
+
// Collapse {version: {"": value}} records (produced when no field was
// requested) into plain {version: value} form.
function cleanBlanks (obj) {
  var flattened = {}
  for (var version in obj) {
    if (obj.hasOwnProperty(version)) flattened[version] = obj[version][""]
  }
  return flattened
}
+
// Fold one {version: {field: value}} record `r` into the accumulator
// `l`, merging per-version field maps.  Falsy `r` is a no-op.
function reducer (l, r) {
  if (!r) return l
  Object.keys(r).forEach(function (version) {
    var src = r[version]
      , dst = l[version] = l[version] || {}
    Object.keys(src).forEach(function (field) {
      dst[field] = src[field]
    })
  })
  return l
}
+
+// return whatever was printed
+function showFields (data, version, fields) {
+ var o = {}
+ ;[data, version].forEach(function (s) {
+ Object.keys(s).forEach(function (k) {
+ o[k] = s[k]
+ })
+ })
+ return search(o, fields.split("."), version.version, fields)
+}
+
// Walk `data` down the `fields` path (e.g. ["dist","tarball"]),
// supporting "name[index]" segments for arrays and index-keyed objects.
// Returns a {version: {title: value}} record, undefined when the path is
// missing, or an Error when a non-terminal segment is not an object.
function search (data, fields, version, title) {
  var field
    , tail = fields
  // skip empty path segments (e.g. produced by "a..b")
  while (!field && fields.length) field = tail.shift()
  fields = [field].concat(tail)
  if (!field && !tail.length) {
    // empty path: return the whole datum under the given title
    var o = {}
    o[version] = {}
    o[version][title] = data
    return o
  }
  var index = field.match(/(.+)\[([^\]]+)\]$/)
  if (index) {
    field = index[1]
    index = index[2]
    // BUGFIX: this read `data.field` (the literal property "field"), so
    // "name[index]" lookups never resolved; it must index by the name.
    if (data[field] && data[field].hasOwnProperty(index)) {
      return search(data[field][index], tail, version, title)
    } else {
      // no such indexed entry: fall back to treating "name[i]" literally
      field = field + "[" + index + "]"
    }
  }
  if (Array.isArray(data)) {
    // single-element arrays are transparent
    if (data.length === 1) {
      return search(data[0], fields, version, title)
    }
    // otherwise search every element, rewriting the title to carry the index
    var results = []
    data.forEach(function (data, i) {
      var tl = title.length
        , newt = title.substr(0, tl - (fields.join(".").length) - 1)
               + "[" + i + "]" + [""].concat(fields).join(".")
      results.push(search(data, fields.slice(), version, newt))
    })
    results = results.reduce(reducer, {})
    return results
  }
  if (!data.hasOwnProperty(field)) {
    return
  }
  data = data[field]
  if (tail.length) {
    if (typeof data === "object") {
      // there are more fields to deal with.
      return search(data, tail, version, title)
    } else {
      return new Error("Not an object: "+data)
    }
  }
  var o = {}
  o[version] = {}
  o[version][title] = data
  return o
}
+
// Render the {version: {field: value}} results for the terminal.
// Version and field prefixes are only shown when more than one version
// or field is present; non-string values are pretty-printed with
// util.inspect.  Calls back with the original data.
function printData (data, name, cb) {
  var versions = Object.keys(data)
    , msg = ""
    , showVersions = versions.length > 1
    , showFields
  function cb_ (er) { return cb(er, data) }

  versions.forEach(function (v, i) {
    var fields = Object.keys(data[v])
    // once any version has multiple fields, label fields everywhere
    showFields = showFields || (fields.length > 1)
    fields.forEach(function (f) {
      var d = cleanup(data[v][f])
      if (showVersions || showFields || typeof d !== "string") {
        d = util.inspect(cleanup(data[v][f]), false, 5, true)
      }
      if (f && showFields) f += " = "
      // multi-line values start on their own line
      if (d.indexOf("\n") !== -1) d = "\n" + d
      msg += (showVersions ? name + "@" + v + " " : "")
           + (showFields ? f : "") + d + "\n"
    })
  })
  // lazy-require to avoid a load cycle with output.js
  output = output || require("./utils/output.js")
  output.write(msg, cb_)
}
// Tidy a value for display: unwrap single-element arrays, reduce a
// versions map to its keys, strip private "_" keys recursively, and
// collapse person-shaped objects ({name, email?, url?} and nothing else)
// back into "Name <email> (url)" strings.
function cleanup (data) {
  if (Array.isArray(data)) {
    if (data.length === 1) {
      data = data[0]
    } else {
      return data.map(cleanup)
    }
  }
  if (!data || typeof data !== "object") return data

  if (typeof data.versions === "object"
      && data.versions
      && !Array.isArray(data.versions)) {
    // a full versions map is noise; just show the version numbers
    data.versions = Object.keys(data.versions || {})
  }

  var keys = Object.keys(data)
  keys.forEach(function (d) {
    if (d.charAt(0) === "_") delete data[d]
    else if (typeof data[d] === "object") data[d] = cleanup(data[d])
  })
  // person detection: name alone, name+one of email/url, or all three
  keys = Object.keys(data)
  if (keys.length <= 3
      && data.name
      && (keys.length === 1
          || keys.length === 3 && data.email && data.url
          || keys.length === 2 && (data.email || data.url))) {
    data = unparsePerson(data)
  }
  return data
}
// Format a person object as the canonical "Name <email> (url)" string;
// strings pass straight through.
function unparsePerson (d) {
  if (typeof d === "string") return d
  var out = d.name
  if (d.email) out += " <" + d.email + ">"
  if (d.url) out += " (" + d.url + ")"
  return out
}
+
diff --git a/deps/npm/lib/whoami.js b/deps/npm/lib/whoami.js
new file mode 100644
index 0000000000..c48f04b1d6
--- /dev/null
+++ b/deps/npm/lib/whoami.js
@@ -0,0 +1,13 @@
+module.exports = whoami
+
+var npm = require("./npm.js")
+ , output = require("./utils/output.js")
+ , log = require("./utils/log.js")
+
+whoami.usage = "npm whoami\n(just prints the 'username' config)"
+
+function whoami (args, cb) {
+ var me = npm.config.get("username")
+ if (!me) me = "Not authed. Run 'npm adduser'"
+ output.write(me, cb)
+}
diff --git a/deps/npm/lib/xmas.js b/deps/npm/lib/xmas.js
new file mode 100644
index 0000000000..90282d0f7b
--- /dev/null
+++ b/deps/npm/lib/xmas.js
@@ -0,0 +1,54 @@
+// happy xmas
+var npm = require("./npm.js")
+ , log = require("./utils/log.js")
+
+module.exports = function (args, cb) {
+npm.config.set("loglevel", "win")
+var s = process.platform === "win32" ? " *" : " \u2605"
+ , f = "\uFF0F"
+ , b = "\uFF3C"
+ , x = process.platform === "win32" ? " " : ""
+ , o = [ "\u0069" , "\u0020", "\u0020", "\u0020", "\u0020", "\u0020"
+ , "\u0020", "\u0020", "\u0020", "\u0020", "\u0020", "\u0020"
+ , "\u0020", "\u2E1B","\u2042","\u2E2E","&","@","\uFF61" ]
+ , oc = [21,33,34,35,36,37]
+ , l = "\u005e"
+
+function w (s) { process.stderr.write(s) }
+
+w("\n")
+;(function T (H) {
+ for (var i = 0; i < H; i ++) w(" ")
+ w(x+"\033[33m"+s+"\n")
+ var M = H * 2 - 1
+ for (L = 1; L <= H; L ++) {
+ var O = L * 2 - 2
+ var S = (M - O) / 2
+ for (var i = 0; i < S; i ++) w(" ")
+ w(x+"\033[32m"+f)
+ for (var i = 0; i < O; i ++) w(
+ "\033["+oc[Math.floor(Math.random()*oc.length)]+"m"+
+ o[Math.floor(Math.random() * o.length)]
+ )
+ w(x+"\033[32m"+b+"\n")
+ }
+ w(" ")
+ for (var i = 1; i < H; i ++) w("\033[32m"+l)
+ w("| "+x+" |")
+ for (var i = 1; i < H; i ++) w("\033[32m"+l)
+ if (H > 10) {
+ w("\n ")
+ for (var i = 1; i < H; i ++) w(" ")
+ w("| "+x+" |")
+ for (var i = 1; i < H; i ++) w(" ")
+ }
+})(20)
+w("\n\n")
+log.win("Happy Xmas, Noders!", "loves you", cb)
+}
+var dg=false
+Object.defineProperty(module.exports, "usage", {get:function () {
+ if (dg) module.exports([], function () {})
+ dg = true
+ return " "
+}})
diff --git a/deps/npm/node_modules/abbrev/README.md b/deps/npm/node_modules/abbrev/README.md
new file mode 100644
index 0000000000..99746fe67c
--- /dev/null
+++ b/deps/npm/node_modules/abbrev/README.md
@@ -0,0 +1,23 @@
+# abbrev-js
+
+Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
+
+Usage:
+
+ var abbrev = require("abbrev");
+ abbrev("foo", "fool", "folding", "flop");
+
+ // returns:
+ { fl: 'flop'
+ , flo: 'flop'
+ , flop: 'flop'
+ , fol: 'folding'
+ , fold: 'folding'
+ , foldi: 'folding'
+ , foldin: 'folding'
+ , folding: 'folding'
+ , foo: 'foo'
+ , fool: 'fool'
+ }
+
+This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.
diff --git a/deps/npm/node_modules/abbrev/lib/abbrev.js b/deps/npm/node_modules/abbrev/lib/abbrev.js
new file mode 100644
index 0000000000..037de2d8da
--- /dev/null
+++ b/deps/npm/node_modules/abbrev/lib/abbrev.js
@@ -0,0 +1,106 @@
+
+module.exports = exports = abbrev.abbrev = abbrev
+
+abbrev.monkeyPatch = monkeyPatch
+
+function monkeyPatch () {
+ Array.prototype.abbrev = function () { return abbrev(this) }
+ Object.prototype.abbrev = function () { return abbrev(Object.keys(this)) }
+}
+
+function abbrev (list) {
+ if (arguments.length !== 1 || !Array.isArray(list)) {
+ list = Array.prototype.slice.call(arguments, 0)
+ }
+ for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
+ args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
+ }
+
+ // sort them lexicographically, so that they're next to their nearest kin
+ args = args.sort(lexSort)
+
+ // walk through each, seeing how much it has in common with the next and previous
+ var abbrevs = {}
+ , prev = ""
+ for (var i = 0, l = args.length ; i < l ; i ++) {
+ var current = args[i]
+ , next = args[i + 1] || ""
+ , nextMatches = true
+ , prevMatches = true
+ if (current === next) continue
+ for (var j = 0, cl = current.length ; j < cl ; j ++) {
+ var curChar = current.charAt(j)
+ nextMatches = nextMatches && curChar === next.charAt(j)
+ prevMatches = prevMatches && curChar === prev.charAt(j)
+ if (nextMatches || prevMatches) continue
+ else {
+ j ++
+ break
+ }
+ }
+ prev = current
+ if (j === cl) {
+ abbrevs[current] = current
+ continue
+ }
+ for (var a = current.substr(0, j) ; j <= cl ; j ++) {
+ abbrevs[a] = current
+ a += current.charAt(j)
+ }
+ }
+ return abbrevs
+}
+
+function lexSort (a, b) {
+ return a === b ? 0 : a > b ? 1 : -1
+}
+
+
+// tests
+if (module === require.main) {
+
+var assert = require("assert")
+ , sys
+sys = require("util")
+
+console.log("running tests")
+function test (list, expect) {
+ var actual = abbrev(list)
+ assert.deepEqual(actual, expect,
+ "abbrev("+sys.inspect(list)+") === " + sys.inspect(expect) + "\n"+
+ "actual: "+sys.inspect(actual))
+ actual = abbrev.apply(exports, list)
+ assert.deepEqual(abbrev.apply(exports, list), expect,
+ "abbrev("+list.map(JSON.stringify).join(",")+") === " + sys.inspect(expect) + "\n"+
+ "actual: "+sys.inspect(actual))
+}
+
+test([ "ruby", "ruby", "rules", "rules", "rules" ],
+{ rub: 'ruby'
+, ruby: 'ruby'
+, rul: 'rules'
+, rule: 'rules'
+, rules: 'rules'
+})
+test(["fool", "foom", "pool", "pope"],
+{ fool: 'fool'
+, foom: 'foom'
+, poo: 'pool'
+, pool: 'pool'
+, pop: 'pope'
+, pope: 'pope'
+})
+test(["a", "ab", "abc", "abcd", "abcde", "acde"],
+{ a: 'a'
+, ab: 'ab'
+, abc: 'abc'
+, abcd: 'abcd'
+, abcde: 'abcde'
+, ac: 'acde'
+, acd: 'acde'
+, acde: 'acde'
+})
+
+console.log("pass")
+
+}
diff --git a/deps/npm/node_modules/abbrev/package.json b/deps/npm/node_modules/abbrev/package.json
new file mode 100644
index 0000000000..ebd082f5dd
--- /dev/null
+++ b/deps/npm/node_modules/abbrev/package.json
@@ -0,0 +1,8 @@
+{ "name" : "abbrev"
+, "version" : "1.0.3"
+, "description" : "Like ruby's abbrev module, but in js"
+, "author" : "Isaac Z. Schlueter <i@izs.me>"
+, "main" : "./lib/abbrev.js"
+, "scripts" : { "test" : "node lib/abbrev.js" }
+, "repository" : "http://github.com/isaacs/abbrev-js"
+}
diff --git a/deps/npm/node_modules/block-stream/README.md b/deps/npm/node_modules/block-stream/README.md
new file mode 100644
index 0000000000..c16e9c4688
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/README.md
@@ -0,0 +1,14 @@
+# block-stream
+
+A stream of blocks.
+
+Write data into it, and it'll output data in buffer blocks the size you
+specify, padding with zeroes if necessary.
+
+```javascript
+var block = new BlockStream(512)
+fs.createReadStream("some-file").pipe(block)
+block.pipe(fs.createWriteStream("block-file"))
+```
+
+When `.end()` or `.flush()` is called, it'll pad the block with zeroes.
diff --git a/deps/npm/node_modules/block-stream/bench/block-stream.js b/deps/npm/node_modules/block-stream/bench/block-stream.js
new file mode 100644
index 0000000000..1141f3a84c
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/bench/block-stream.js
@@ -0,0 +1,68 @@
+var BlockStream = require("../block-stream.js")
+
+var blockSizes = [16, 25, 1024]
+ , writeSizes = [4, 8, 15, 16, 17, 64, 100]
+ , writeCounts = [1, 10, 100]
+ , tap = require("tap")
+
+writeCounts.forEach(function (writeCount) {
+blockSizes.forEach(function (blockSize) {
+writeSizes.forEach(function (writeSize) {
+ tap.test("writeSize=" + writeSize +
+ " blockSize="+blockSize +
+ " writeCount="+writeCount, function (t) {
+ var f = new BlockStream(blockSize, {nopad: true })
+
+ var actualChunks = 0
+ var actualBytes = 0
+ var timeouts = 0
+
+ f.on("data", function (c) {
+ timeouts ++
+
+ actualChunks ++
+ actualBytes += c.length
+
+ // make sure that no data gets corrupted, and basic sanity
+ var before = c.toString()
+ // simulate a slow write operation
+ setTimeout(function () {
+ timeouts --
+
+ var after = c.toString()
+ t.equal(after, before, "should not change data")
+
+ // now corrupt it, to find leaks.
+ for (var i = 0; i < c.length; i ++) {
+ c[i] = "x".charCodeAt(0)
+ }
+ }, 100)
+ })
+
+ f.on("end", function () {
+ // round up to the nearest block size
+ var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
+ var expectBytes = writeSize * writeCount * 2
+ t.equal(actualBytes, expectBytes,
+ "bytes=" + expectBytes + " writeSize=" + writeSize)
+ t.equal(actualChunks, expectChunks,
+ "chunks=" + expectChunks + " writeSize=" + writeSize)
+
+ // wait for all the timeout checks to finish, then end the test
+ setTimeout(function WAIT () {
+ if (timeouts > 0) return setTimeout(WAIT)
+ t.end()
+ }, 100)
+ })
+
+ for (var i = 0; i < writeCount; i ++) {
+ var a = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
+ var b = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
+ f.write(a)
+ f.write(b)
+ }
+ f.end()
+ })
+}) }) })
diff --git a/deps/npm/node_modules/block-stream/bench/dropper-pause.js b/deps/npm/node_modules/block-stream/bench/dropper-pause.js
new file mode 100644
index 0000000000..93e4068eea
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/bench/dropper-pause.js
@@ -0,0 +1,70 @@
+var BlockStream = require("dropper")
+
+var blockSizes = [16, 25, 1024]
+ , writeSizes = [4, 8, 15, 16, 17, 64, 100]
+ , writeCounts = [1, 10, 100]
+ , tap = require("tap")
+
+writeCounts.forEach(function (writeCount) {
+blockSizes.forEach(function (blockSize) {
+writeSizes.forEach(function (writeSize) {
+ tap.test("writeSize=" + writeSize +
+ " blockSize="+blockSize +
+ " writeCount="+writeCount, function (t) {
+ var f = new BlockStream(blockSize, {nopad: true })
+
+ var actualChunks = 0
+ var actualBytes = 0
+ var timeouts = 0
+
+ f.on("data", function (c) {
+ timeouts ++
+
+ actualChunks ++
+ actualBytes += c.length
+
+ // make sure that no data gets corrupted, and basic sanity
+ var before = c.toString()
+ // simulate a slow write operation
+ f.pause()
+ setTimeout(function () {
+ timeouts --
+
+ var after = c.toString()
+ t.equal(after, before, "should not change data")
+
+ // now corrupt it, to find leaks.
+ for (var i = 0; i < c.length; i ++) {
+ c[i] = "x".charCodeAt(0)
+ }
+ f.resume()
+ }, 100)
+ })
+
+ f.on("end", function () {
+ // round up to the nearest block size
+ var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
+ var expectBytes = writeSize * writeCount * 2
+ t.equal(actualBytes, expectBytes,
+ "bytes=" + expectBytes + " writeSize=" + writeSize)
+ t.equal(actualChunks, expectChunks,
+ "chunks=" + expectChunks + " writeSize=" + writeSize)
+
+ // wait for all the timeout checks to finish, then end the test
+ setTimeout(function WAIT () {
+ if (timeouts > 0) return setTimeout(WAIT)
+ t.end()
+ }, 100)
+ })
+
+ for (var i = 0; i < writeCount; i ++) {
+ var a = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
+ var b = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
+ f.write(a)
+ f.write(b)
+ }
+ f.end()
+ })
+}) }) })
diff --git a/deps/npm/node_modules/block-stream/bench/dropper.js b/deps/npm/node_modules/block-stream/bench/dropper.js
new file mode 100644
index 0000000000..55fa133054
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/bench/dropper.js
@@ -0,0 +1,68 @@
+var BlockStream = require("dropper")
+
+var blockSizes = [16, 25, 1024]
+ , writeSizes = [4, 8, 15, 16, 17, 64, 100]
+ , writeCounts = [1, 10, 100]
+ , tap = require("tap")
+
+writeCounts.forEach(function (writeCount) {
+blockSizes.forEach(function (blockSize) {
+writeSizes.forEach(function (writeSize) {
+ tap.test("writeSize=" + writeSize +
+ " blockSize="+blockSize +
+ " writeCount="+writeCount, function (t) {
+ var f = new BlockStream(blockSize, {nopad: true })
+
+ var actualChunks = 0
+ var actualBytes = 0
+ var timeouts = 0
+
+ f.on("data", function (c) {
+ timeouts ++
+
+ actualChunks ++
+ actualBytes += c.length
+
+ // make sure that no data gets corrupted, and basic sanity
+ var before = c.toString()
+ // simulate a slow write operation
+ setTimeout(function () {
+ timeouts --
+
+ var after = c.toString()
+ t.equal(after, before, "should not change data")
+
+ // now corrupt it, to find leaks.
+ for (var i = 0; i < c.length; i ++) {
+ c[i] = "x".charCodeAt(0)
+ }
+ }, 100)
+ })
+
+ f.on("end", function () {
+ // round up to the nearest block size
+ var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
+ var expectBytes = writeSize * writeCount * 2
+ t.equal(actualBytes, expectBytes,
+ "bytes=" + expectBytes + " writeSize=" + writeSize)
+ t.equal(actualChunks, expectChunks,
+ "chunks=" + expectChunks + " writeSize=" + writeSize)
+
+ // wait for all the timeout checks to finish, then end the test
+ setTimeout(function WAIT () {
+ if (timeouts > 0) return setTimeout(WAIT)
+ t.end()
+ }, 100)
+ })
+
+ for (var i = 0; i < writeCount; i ++) {
+ var a = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
+ var b = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
+ f.write(a)
+ f.write(b)
+ }
+ f.end()
+ })
+}) }) })
diff --git a/deps/npm/node_modules/block-stream/block-stream.js b/deps/npm/node_modules/block-stream/block-stream.js
new file mode 100644
index 0000000000..008de035c2
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/block-stream.js
@@ -0,0 +1,209 @@
+// write data to it, and it'll emit data in 512 byte blocks.
+// if you .end() or .flush(), it'll emit whatever it's got,
+// padded with nulls to 512 bytes.
+
+module.exports = BlockStream
+
+var Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , assert = require("assert").ok
+ , debug = process.env.DEBUG ? console.error : function () {}
+
+function BlockStream (size, opt) {
+ this.writable = this.readable = true
+ this._opt = opt || {}
+ this._chunkSize = size || 512
+ this._offset = 0
+ this._buffer = []
+ this._bufferLength = 0
+ if (this._opt.nopad) this._zeroes = false
+ else {
+ this._zeroes = new Buffer(this._chunkSize)
+ for (var i = 0; i < this._chunkSize; i ++) {
+ this._zeroes[i] = 0
+ }
+ }
+}
+
+inherits(BlockStream, Stream)
+
+BlockStream.prototype.write = function (c) {
+ // debug(" BS write", c)
+ if (this._ended) throw new Error("BlockStream: write after end")
+ if (c && !Buffer.isBuffer(c)) c = new Buffer(c + "")
+ if (c.length) {
+ this._buffer.push(c)
+ this._bufferLength += c.length
+ }
+ // debug("pushed onto buffer", this._bufferLength)
+ if (this._bufferLength >= this._chunkSize) {
+ if (this._paused) {
+ // debug(" BS paused, return false, need drain")
+ this._needDrain = true
+ return false
+ }
+ this._emitChunk()
+ }
+ return true
+}
+
+BlockStream.prototype.pause = function () {
+ // debug(" BS pausing")
+ this._paused = true
+}
+
+BlockStream.prototype.resume = function () {
+ // debug(" BS resume")
+ this._paused = false
+ return this._emitChunk()
+}
+
+BlockStream.prototype.end = function (chunk) {
+ // debug("end", chunk)
+ if (typeof chunk === "function") cb = chunk, chunk = null
+ if (chunk) this.write(chunk)
+ this._ended = true
+ this.flush()
+}
+
+BlockStream.prototype.flush = function () {
+ this._emitChunk(true)
+}
+
+BlockStream.prototype._emitChunk = function (flush) {
+ // debug("emitChunk flush=%j emitting=%j paused=%j", flush, this._emitting, this._paused)
+
+ // emit a <chunkSize> chunk
+ if (flush && this._zeroes) {
+ // debug(" BS push zeroes", this._bufferLength)
+ // push a chunk of zeroes
+ var padBytes = (this._bufferLength % this._chunkSize)
+ if (padBytes !== 0) padBytes = this._chunkSize - padBytes
+ if (padBytes > 0) {
+ // debug("padBytes", padBytes, this._zeroes.slice(0, padBytes))
+ this._buffer.push(this._zeroes.slice(0, padBytes))
+ this._bufferLength += padBytes
+ // debug(this._buffer[this._buffer.length - 1].length, this._bufferLength)
+ }
+ }
+
+ if (this._emitting || this._paused) return
+ this._emitting = true
+
+ // debug(" BS entering loops")
+ var bufferIndex = 0
+ while (this._bufferLength >= this._chunkSize &&
+ (flush || !this._paused)) {
+ // debug(" BS data emission loop", this._bufferLength)
+
+ var out
+ , outOffset = 0
+ , outHas = this._chunkSize
+
+ while (outHas > 0 && (flush || !this._paused) ) {
+ // debug(" BS data inner emit loop", this._bufferLength)
+ var cur = this._buffer[bufferIndex]
+ , curHas = cur.length - this._offset
+ // debug("cur=", cur)
+ // debug("curHas=%j", curHas)
+ // If it's not big enough to fill the whole thing, then we'll need
+ // to copy multiple buffers into one. However, if it is big enough,
+ // then just slice out the part we want, to save unnecessary copying.
+ // Also, need to copy if we've already done some copying, since buffers
+ // can't be joined like cons strings.
+ if (out || curHas < outHas) {
+ out = out || new Buffer(this._chunkSize)
+ cur.copy(out, outOffset,
+ this._offset, this._offset + Math.min(curHas, outHas))
+ } else if (cur.length === outHas && this._offset === 0) {
+ // shortcut -- cur is exactly long enough, and no offset.
+ out = cur
+ } else {
+ // slice out the piece of cur that we need.
+ out = cur.slice(this._offset, this._offset + outHas)
+ }
+
+ if (curHas > outHas) {
+ // means that the current buffer couldn't be completely output
+ // update this._offset to reflect how much WAS written
+ this._offset += outHas
+ outHas = 0
+ } else {
+ // output the entire current chunk.
+ // toss it away
+ outHas -= curHas
+ outOffset += curHas
+ bufferIndex ++
+ this._offset = 0
+ }
+ }
+
+ this._bufferLength -= this._chunkSize
+ assert(out.length === this._chunkSize)
+ // debug("emitting data", out)
+ // debug(" BS emitting, paused=%j", this._paused, this._bufferLength)
+ this.emit("data", out)
+ out = null
+ }
+ // debug(" BS out of loops", this._bufferLength)
+
+ // whatever is left, it's not enough to fill up a block, or we're paused
+ this._buffer = this._buffer.slice(bufferIndex)
+ if (this._paused) {
+ // debug(" BS paused, leaving", this._bufferLength)
+ this._needsDrain = true
+ this._emitting = false
+ return
+ }
+
+ // if flushing, and not using null-padding, then need to emit the last
+ // chunk(s) sitting in the queue. We know that it's not enough to
+ // fill up a whole block, because otherwise it would have been emitted
+ // above, but there may be some offset.
+ var l = this._buffer.length
+ if (flush && !this._zeroes && l) {
+ if (l === 1) {
+ if (this._offset) {
+ this.emit("data", this._buffer[0].slice(this._offset))
+ } else {
+ this.emit("data", this._buffer[0])
+ }
+ } else {
+ var outHas = this._bufferLength
+ , out = new Buffer(outHas)
+ , outOffset = 0
+ for (var i = 0; i < l; i ++) {
+ var cur = this._buffer[i]
+ , curHas = cur.length - this._offset
+ cur.copy(out, outOffset, this._offset)
+ this._offset = 0
+ outOffset += curHas
+ this._bufferLength -= curHas
+ }
+ this.emit("data", out)
+ }
+ // truncate
+ this._buffer.length = 0
+ this._bufferLength = 0
+ this._offset = 0
+ }
+
+ // now either drained or ended
+ // debug("either draining, or ended", this._bufferLength, this._ended)
+ // means that we've flushed out all that we can so far.
+ if (this._needDrain) {
+ // debug("emitting drain", this._bufferLength)
+ this._needDrain = false
+ this.emit("drain")
+ }
+
+ if ((this._bufferLength === 0) && this._ended && !this._endEmitted) {
+ // debug("emitting end", this._bufferLength)
+ this._endEmitted = true
+ this.emit("end")
+ }
+
+ this._emitting = false
+
+ // debug(" BS no longer emitting", flush, this._paused, this._emitting, this._bufferLength, this._chunkSize)
+}
diff --git a/deps/npm/node_modules/block-stream/package.json b/deps/npm/node_modules/block-stream/package.json
new file mode 100644
index 0000000000..203961a144
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/package.json
@@ -0,0 +1,23 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "block-stream",
+ "description": "a stream of blocks",
+ "version": "0.0.4",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/block-stream.git"
+ },
+ "engines": {
+ "node": "0.4 || ~0.5.8 || 0.6"
+ },
+ "main": "block-stream.js",
+ "dependencies": {
+ "inherits": "~1.0.0"
+ },
+ "devDependencies": {
+ "tap": "0.x"
+ },
+ "scripts": {
+ "test": "tap test/"
+ }
+}
diff --git a/deps/npm/node_modules/block-stream/test/basic.js b/deps/npm/node_modules/block-stream/test/basic.js
new file mode 100644
index 0000000000..b4b930511e
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/test/basic.js
@@ -0,0 +1,27 @@
+var tap = require("tap")
+ , BlockStream = require("../block-stream.js")
+
+tap.test("basic test", function (t) {
+ var b = new BlockStream(16)
+ var fs = require("fs")
+ var fstr = fs.createReadStream(__filename, {encoding: "utf8"})
+ fstr.pipe(b)
+
+ var stat
+ t.doesNotThrow(function () {
+ stat = fs.statSync(__filename)
+ }, "stat should not throw")
+
+ var totalBytes = 0
+ b.on("data", function (c) {
+ t.equal(c.length, 16, "chunks should be 16 bytes long")
+ t.type(c, Buffer, "chunks should be buffer objects")
+ totalBytes += c.length
+ })
+ b.on("end", function () {
+ var expectedBytes = stat.size + (16 - stat.size % 16)
+ t.equal(totalBytes, expectedBytes, "Should be multiple of 16")
+ t.end()
+ })
+
+})
diff --git a/deps/npm/node_modules/block-stream/test/nopad-thorough.js b/deps/npm/node_modules/block-stream/test/nopad-thorough.js
new file mode 100644
index 0000000000..1141f3a84c
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/test/nopad-thorough.js
@@ -0,0 +1,68 @@
+var BlockStream = require("../block-stream.js")
+
+var blockSizes = [16, 25, 1024]
+ , writeSizes = [4, 8, 15, 16, 17, 64, 100]
+ , writeCounts = [1, 10, 100]
+ , tap = require("tap")
+
+writeCounts.forEach(function (writeCount) {
+blockSizes.forEach(function (blockSize) {
+writeSizes.forEach(function (writeSize) {
+ tap.test("writeSize=" + writeSize +
+ " blockSize="+blockSize +
+ " writeCount="+writeCount, function (t) {
+ var f = new BlockStream(blockSize, {nopad: true })
+
+ var actualChunks = 0
+ var actualBytes = 0
+ var timeouts = 0
+
+ f.on("data", function (c) {
+ timeouts ++
+
+ actualChunks ++
+ actualBytes += c.length
+
+ // make sure that no data gets corrupted, and basic sanity
+ var before = c.toString()
+ // simulate a slow write operation
+ setTimeout(function () {
+ timeouts --
+
+ var after = c.toString()
+ t.equal(after, before, "should not change data")
+
+ // now corrupt it, to find leaks.
+ for (var i = 0; i < c.length; i ++) {
+ c[i] = "x".charCodeAt(0)
+ }
+ }, 100)
+ })
+
+ f.on("end", function () {
+ // round up to the nearest block size
+ var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
+ var expectBytes = writeSize * writeCount * 2
+ t.equal(actualBytes, expectBytes,
+ "bytes=" + expectBytes + " writeSize=" + writeSize)
+ t.equal(actualChunks, expectChunks,
+ "chunks=" + expectChunks + " writeSize=" + writeSize)
+
+ // wait for all the timeout checks to finish, then end the test
+ setTimeout(function WAIT () {
+ if (timeouts > 0) return setTimeout(WAIT)
+ t.end()
+ }, 100)
+ })
+
+ for (var i = 0; i < writeCount; i ++) {
+ var a = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
+ var b = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
+ f.write(a)
+ f.write(b)
+ }
+ f.end()
+ })
+}) }) })
diff --git a/deps/npm/node_modules/block-stream/test/nopad.js b/deps/npm/node_modules/block-stream/test/nopad.js
new file mode 100644
index 0000000000..6d38429fbc
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/test/nopad.js
@@ -0,0 +1,57 @@
+var BlockStream = require("../")
+var tap = require("tap")
+
+
+tap.test("don't pad, small writes", function (t) {
+ var f = new BlockStream(16, { nopad: true })
+ t.plan(1)
+
+ f.on("data", function (c) {
+ t.equal(c.toString(), "abc", "should get 'abc'")
+ })
+
+ f.on("end", function () { t.end() })
+
+ f.write(new Buffer("a"))
+ f.write(new Buffer("b"))
+ f.write(new Buffer("c"))
+ f.end()
+})
+
+tap.test("don't pad, exact write", function (t) {
+ var f = new BlockStream(16, { nopad: true })
+ t.plan(1)
+
+ var first = true
+ f.on("data", function (c) {
+ if (first) {
+ first = false
+ t.equal(c.toString(), "abcdefghijklmnop", "first chunk")
+ } else {
+ t.fail("should only get one")
+ }
+ })
+
+ f.on("end", function () { t.end() })
+
+ f.end(new Buffer("abcdefghijklmnop"))
+})
+
+tap.test("don't pad, big write", function (t) {
+ var f = new BlockStream(16, { nopad: true })
+ t.plan(2)
+
+ var first = true
+ f.on("data", function (c) {
+ if (first) {
+ first = false
+ t.equal(c.toString(), "abcdefghijklmnop", "first chunk")
+ } else {
+ t.equal(c.toString(), "q")
+ }
+ })
+
+ f.on("end", function () { t.end() })
+
+ f.end(new Buffer("abcdefghijklmnopq"))
+})
diff --git a/deps/npm/node_modules/block-stream/test/pause-resume.js b/deps/npm/node_modules/block-stream/test/pause-resume.js
new file mode 100644
index 0000000000..248cf9cbd5
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/test/pause-resume.js
@@ -0,0 +1,73 @@
+var BlockStream = require("../block-stream.js")
+
+var blockSizes = [16]
+ , writeSizes = [15, 16, 17]
+ , writeCounts = [1, 10, 100]
+ , tap = require("tap")
+
+writeCounts.forEach(function (writeCount) {
+blockSizes.forEach(function (blockSize) {
+writeSizes.forEach(function (writeSize) {
+ tap.test("writeSize=" + writeSize +
+ " blockSize="+blockSize +
+ " writeCount="+writeCount, function (t) {
+ var f = new BlockStream(blockSize)
+
+ var actualChunks = 0
+ var actualBytes = 0
+ var timeouts = 0
+ var paused = false
+
+ f.on("data", function (c) {
+ timeouts ++
+ t.notOk(paused, "should not be paused when emitting data")
+
+ actualChunks ++
+ actualBytes += c.length
+
+ // make sure that no data gets corrupted, and basic sanity
+ var before = c.toString()
+ // simulate a slow write operation
+ paused = true
+ f.pause()
+ process.nextTick(function () {
+ var after = c.toString()
+ t.equal(after, before, "should not change data")
+
+ // now corrupt it, to find leaks.
+ for (var i = 0; i < c.length; i ++) {
+ c[i] = "x".charCodeAt(0)
+ }
+ paused = false
+ f.resume()
+ timeouts --
+ })
+ })
+
+ f.on("end", function () {
+ // round up to the nearest block size
+ var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
+ var expectBytes = expectChunks * blockSize
+ t.equal(actualBytes, expectBytes,
+ "bytes=" + expectBytes + " writeSize=" + writeSize)
+ t.equal(actualChunks, expectChunks,
+ "chunks=" + expectChunks + " writeSize=" + writeSize)
+
+ // wait for all the timeout checks to finish, then end the test
+ setTimeout(function WAIT () {
+ if (timeouts > 0) return setTimeout(WAIT)
+ t.end()
+ }, 200)
+ })
+
+ for (var i = 0; i < writeCount; i ++) {
+ var a = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
+ var b = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
+ f.write(a)
+ f.write(b)
+ }
+ f.end()
+ })
+}) }) })
diff --git a/deps/npm/node_modules/block-stream/test/thorough.js b/deps/npm/node_modules/block-stream/test/thorough.js
new file mode 100644
index 0000000000..4ab14ff208
--- /dev/null
+++ b/deps/npm/node_modules/block-stream/test/thorough.js
@@ -0,0 +1,68 @@
+var BlockStream = require("../block-stream.js")
+
+var blockSizes = [16, 25, 1024]
+ , writeSizes = [4, 8, 15, 16, 17, 64, 100]
+ , writeCounts = [1, 10, 100]
+ , tap = require("tap")
+
+writeCounts.forEach(function (writeCount) {
+blockSizes.forEach(function (blockSize) {
+writeSizes.forEach(function (writeSize) {
+ tap.test("writeSize=" + writeSize +
+ " blockSize="+blockSize +
+ " writeCount="+writeCount, function (t) {
+ var f = new BlockStream(blockSize)
+
+ var actualChunks = 0
+ var actualBytes = 0
+ var timeouts = 0
+
+ f.on("data", function (c) {
+ timeouts ++
+
+ actualChunks ++
+ actualBytes += c.length
+
+ // make sure that no data gets corrupted, and basic sanity
+ var before = c.toString()
+ // simulate a slow write operation
+ setTimeout(function () {
+ timeouts --
+
+ var after = c.toString()
+ t.equal(after, before, "should not change data")
+
+ // now corrupt it, to find leaks.
+ for (var i = 0; i < c.length; i ++) {
+ c[i] = "x".charCodeAt(0)
+ }
+ }, 100)
+ })
+
+ f.on("end", function () {
+ // round up to the nearest block size
+ var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
+ var expectBytes = expectChunks * blockSize
+ t.equal(actualBytes, expectBytes,
+ "bytes=" + expectBytes + " writeSize=" + writeSize)
+ t.equal(actualChunks, expectChunks,
+ "chunks=" + expectChunks + " writeSize=" + writeSize)
+
+ // wait for all the timeout checks to finish, then end the test
+ setTimeout(function WAIT () {
+ if (timeouts > 0) return setTimeout(WAIT)
+ t.end()
+ }, 100)
+ })
+
+ for (var i = 0; i < writeCount; i ++) {
+ var a = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
+ var b = new Buffer(writeSize);
+ for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
+ f.write(a)
+ f.write(b)
+ }
+ f.end()
+ })
+}) }) })
diff --git a/deps/npm/node_modules/fstream/.gitignore b/deps/npm/node_modules/fstream/.gitignore
new file mode 100644
index 0000000000..66880db1ab
--- /dev/null
+++ b/deps/npm/node_modules/fstream/.gitignore
@@ -0,0 +1,3 @@
+.*.swp
+examples/deep-copy
+node_modules/
diff --git a/deps/npm/node_modules/fstream/README.md b/deps/npm/node_modules/fstream/README.md
new file mode 100644
index 0000000000..9d8cb77e5c
--- /dev/null
+++ b/deps/npm/node_modules/fstream/README.md
@@ -0,0 +1,76 @@
+Like FS streams, but with stat on them, and supporting directories and
+symbolic links, as well as normal files. Also, you can use this to set
+the stats on a file, even if you don't change its contents, or to create
+a symlink, etc.
+
+So, for example, you can "write" a directory, and it'll call `mkdir`. You
+can specify a uid and gid, and it'll call `chown`. You can specify a
+`mtime` and `atime`, and it'll call `utimes`. You can call it a symlink
+and provide a `linkpath` and it'll call `symlink`.
+
+Note that it won't automatically resolve symbolic links. So, if you
+call `fstream.Reader('/some/symlink')` then you'll get an object
+that stats and then ends immediately (since it has no data). To follow
+symbolic links, do this: `fstream.Reader({path:'/some/symlink', follow:
+true })`.
+
+There are various checks to make sure that the bytes emitted are the
+same as the intended size, if the size is set.
+
+## Examples
+
+```javascript
+fstream
+ .Writer({ path: "path/to/file"
+ , mode: 0755
+ , size: 6
+ })
+ .write("hello\n")
+ .end()
+```
+
+This will create the directories if they're missing, and then write
+`hello\n` into the file, chmod it to 0755, and assert that 6 bytes have
+been written when it's done.
+
+```javascript
+fstream
+ .Writer({ path: "path/to/file"
+ , mode: 0755
+ , size: 6
+ , flags: "a"
+ })
+ .write("hello\n")
+ .end()
+```
+
+You can pass flags in, if you want to append to a file.
+
+```javascript
+fstream
+ .Writer({ path: "path/to/symlink"
+ , linkpath: "./file"
+ , SymbolicLink: true
+ , mode: "0755" // octal strings supported
+ })
+ .end()
+```
+
+If isSymbolicLink is a function, it'll be called, and if it returns
+true, then it'll treat it as a symlink. If it's not a function, then
+any truish value will make a symlink, or you can set `type:
+'SymbolicLink'`, which does the same thing.
+
+Note that the linkpath is relative to the symbolic link location, not
+the parent dir or cwd.
+
+```javascript
+fstream
+ .Reader("path/to/dir")
+ .pipe(fstream.Writer("path/to/other/dir"))
+```
+
+This will do like `cp -Rp path/to/dir path/to/other/dir`. If the other
+dir exists and isn't a directory, then it'll emit an error. It'll also
+set the uid, gid, mode, etc. to be identical. In this way, it's more
+like `rsync -a` than simply a copy.
diff --git a/deps/npm/node_modules/fstream/examples/pipe.js b/deps/npm/node_modules/fstream/examples/pipe.js
new file mode 100644
index 0000000000..b08d79481b
--- /dev/null
+++ b/deps/npm/node_modules/fstream/examples/pipe.js
@@ -0,0 +1,113 @@
+var fstream = require("../fstream.js")
+var path = require("path")
+
+var r = fstream.Reader({ path: path.dirname(__dirname)
+ , filter: function () {
+ return !this.basename.match(/^\./) &&
+ !this.basename.match(/^node_modules$/) &&
+ !this.basename.match(/^deep-copy$/)
+ }
+ })
+
+var w = fstream.Writer({ path: path.resolve(__dirname, "deep-copy")
+ , type: "Directory"
+ })
+
+var indent = ""
+var escape = {}
+
+r.on("entry", appears)
+//r.on("ready", function () {
+// appears(r)
+//})
+
+function appears (entry) {
+ console.error(indent + "a %s appears!", entry.type, entry.basename)
+ if (foggy) {
+ console.error("FOGGY!")
+ var p = entry
+ do {
+ console.error(p.depth, p.path, p._paused)
+ } while (p = p.parent)
+
+ throw new Error("\033[mshould not have entries while foggy")
+ }
+ indent += "\t"
+ entry.on("data", missile(entry))
+ entry.on("end", runaway(entry))
+ entry.on("entry", appears)
+}
+
+var foggy
+function missile (entry) {
+ if (entry.type === "Directory") {
+ var ended = false
+ entry.once("end", function () { ended = true })
+ return function (c) {
+ // throw in some pathological pause()/resume() behavior
+ // just for extra fun.
+ process.nextTick(function () {
+ if (!foggy && !ended) { // && Math.random() < 0.3) {
+ console.error(indent +"%s casts a spell", entry.basename)
+ console.error("\na slowing fog comes over the battlefield...\n\033[32m")
+ entry.pause()
+ entry.once("resume", liftFog)
+ foggy = setTimeout(liftFog, 10)
+
+ function liftFog (who) {
+ if (!foggy) return
+ if (who) {
+ console.error("%s breaks the spell!", who && who.path)
+ } else {
+ console.error("the spell expires!")
+ }
+ console.error("\033[mthe fog lifts!\n")
+ clearTimeout(foggy)
+ foggy = null
+ if (entry._paused) entry.resume()
+ }
+
+ }
+ })
+ }
+ }
+
+ return function (c) {
+ var e = Math.random() < 0.5
+ console.error(indent + "%s %s for %d damage!",
+ entry.basename,
+ e ? "is struck" : "fires a chunk",
+ c.length)
+ }
+}
+
+function runaway (entry) { return function () {
+ var e = Math.random() < 0.5
+ console.error(indent + "%s %s",
+ entry.basename,
+ e ? "turns to flee" : "is vanquished!")
+ indent = indent.slice(0, -1)
+}}
+
+
+w.on("entry", attacks)
+//w.on("ready", function () { attacks(w) })
+function attacks (entry) {
+ console.error(indent + "%s %s!", entry.basename,
+ entry.type === "Directory" ? "calls for backup" : "attacks")
+ entry.on("entry", attacks)
+}
+
+var ended = false
+r.on("end", function () {
+ if (foggy) clearTimeout(foggy)
+ console.error("\033[mIT'S OVER!!")
+ console.error("A WINNAR IS YOU!")
+ ended = true
+})
+
+process.on("exit", function () {
+ console.error("ended? "+ended)
+})
+r.pipe(w)
+
diff --git a/deps/npm/node_modules/fstream/examples/reader.js b/deps/npm/node_modules/fstream/examples/reader.js
new file mode 100644
index 0000000000..73075102f4
--- /dev/null
+++ b/deps/npm/node_modules/fstream/examples/reader.js
@@ -0,0 +1,29 @@
+var fstream = require("../fstream.js")
+var path = require("path")
+
+var r = fstream.Reader({ path: path.dirname(__dirname)
+ , filter: function () {
+ return !this.basename.match(/^\./)
+ }
+ })
+
+console.error(r instanceof fstream.Reader)
+console.error(r instanceof require("stream").Stream)
+console.error(r instanceof require("events").EventEmitter)
+console.error(r.on)
+
+r.on("stat", function () {
+ console.error("a %s !!!\t", r.type, r.path)
+})
+
+r.on("entries", function (entries) {
+ console.error("\t" + entries.join("\n\t"))
+})
+
+r.on("entry", function (entry) {
+ console.error("a %s !!!\t", entry.type, entry.path)
+})
+
+r.on("end", function () {
+ console.error("IT'S OVER!!")
+})
diff --git a/deps/npm/node_modules/fstream/examples/symlink-write.js b/deps/npm/node_modules/fstream/examples/symlink-write.js
new file mode 100644
index 0000000000..657375b988
--- /dev/null
+++ b/deps/npm/node_modules/fstream/examples/symlink-write.js
@@ -0,0 +1,9 @@
+var fstream = require("../fstream.js")
+
+fstream
+ .Writer({ path: "path/to/symlink"
+ , linkpath: "./file"
+ , isSymbolicLink: true
+ , mode: "0755" // octal strings supported
+ })
+ .end()
diff --git a/deps/npm/node_modules/fstream/fstream.js b/deps/npm/node_modules/fstream/fstream.js
new file mode 100644
index 0000000000..c66d26f519
--- /dev/null
+++ b/deps/npm/node_modules/fstream/fstream.js
@@ -0,0 +1,31 @@
+exports.Abstract = require("./lib/abstract.js")
+exports.Reader = require("./lib/reader.js")
+exports.Writer = require("./lib/writer.js")
+
+exports.File =
+ { Reader: require("./lib/file-reader.js")
+ , Writer: require("./lib/file-writer.js") }
+
+exports.Dir =
+ { Reader : require("./lib/dir-reader.js")
+ , Writer : require("./lib/dir-writer.js") }
+
+exports.Link =
+ { Reader : require("./lib/link-reader.js")
+ , Writer : require("./lib/link-writer.js") }
+
+exports.Proxy =
+ { Reader : require("./lib/proxy-reader.js")
+ , Writer : require("./lib/proxy-writer.js") }
+
+exports.Reader.Dir = exports.DirReader = exports.Dir.Reader
+exports.Reader.File = exports.FileReader = exports.File.Reader
+exports.Reader.Link = exports.LinkReader = exports.Link.Reader
+exports.Reader.Proxy = exports.ProxyReader = exports.Proxy.Reader
+
+exports.Writer.Dir = exports.DirWriter = exports.Dir.Writer
+exports.Writer.File = exports.FileWriter = exports.File.Writer
+exports.Writer.Link = exports.LinkWriter = exports.Link.Writer
+exports.Writer.Proxy = exports.ProxyWriter = exports.Proxy.Writer
+
+exports.collect = require("./lib/collect.js")
diff --git a/deps/npm/node_modules/fstream/lib/abstract.js b/deps/npm/node_modules/fstream/lib/abstract.js
new file mode 100644
index 0000000000..add48b945d
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/abstract.js
@@ -0,0 +1,82 @@
+// the parent class for all fstreams.
+
+module.exports = Abstract
+
+var Stream = require("stream").Stream
+ , inherits = require("inherits")
+
+function Abstract () {
+ Stream.call(this)
+}
+
+inherits(Abstract, Stream)
+
+Abstract.prototype.on = function (ev, fn) {
+ if (ev === "ready" && this.ready) {
+ process.nextTick(fn.bind(this))
+ } else {
+ Stream.prototype.on.call(this, ev, fn)
+ }
+ return this
+}
+
+Abstract.prototype.destroy = function () {}
+
+Abstract.prototype.warn = function (msg, code) {
+ var me = this
+ , er = decorate(msg, code, me)
+ if (!me.listeners("warn").length) {
+ console.error("%s %s\n" +
+ "path = %s\n" +
+ "syscall = %s\n" +
+ "fstream_type = %s\n" +
+ "fstream_path = %s\n" +
+ "fstream_unc_path = %s\n" +
+ "fstream_class = %s\n" +
+ "fstream_stack =\n%s\n",
+ code || "UNKNOWN",
+ er.stack,
+ er.path,
+ er.syscall,
+ er.fstream_type,
+ er.fstream_path,
+ er.fstream_unc_path,
+ er.fstream_class,
+ er.fstream_stack.join("\n"))
+ } else {
+ me.emit("warn", er)
+ }
+}
+
+Abstract.prototype.info = function (msg, code) {
+ var me = this
+ if (!me.listeners("info").length) return
+ me.emit("info", msg, code)
+}
+
+Abstract.prototype.error = function (msg, code, th) {
+ var er = decorate(msg, code, this)
+ if (th) throw er
+ else this.emit("error", er)
+}
+
+function decorate (er, code, me) {
+ if (!(er instanceof Error)) er = new Error(er)
+ er.code = er.code || code
+ er.path = er.path || me.path
+ er.fstream_type = er.fstream_type || me.type
+ er.fstream_path = er.fstream_path || me.path
+ if (me._path !== me.path) {
+ er.fstream_unc_path = er.fstream_unc_path || me._path
+ }
+ if (me.linkpath) {
+ er.fstream_linkpath = er.fstream_linkpath || me.linkpath
+ }
+ er.fstream_class = er.fstream_class || me.constructor.name
+ er.fstream_stack = er.fstream_stack ||
+ new Error().stack.split(/\n/).slice(3).map(function (s) {
+ return s.replace(/^ at /, "")
+ })
+
+ return er
+}
diff --git a/deps/npm/node_modules/fstream/lib/collect.js b/deps/npm/node_modules/fstream/lib/collect.js
new file mode 100644
index 0000000000..a36f780eb2
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/collect.js
@@ -0,0 +1,67 @@
+module.exports = collect
+
+function collect (stream) {
+ if (stream._collected) return
+
+ stream._collected = true
+ stream.pause()
+
+ stream.on("data", save)
+ stream.on("end", save)
+ var buf = []
+ function save (b) {
+ if (typeof b === "string") b = new Buffer(b)
+ if (Buffer.isBuffer(b) && !b.length) return
+ buf.push(b)
+ }
+
+ stream.on("entry", saveEntry)
+ var entryBuffer = []
+ function saveEntry (e) {
+ collect(e)
+ entryBuffer.push(e)
+ }
+
+ stream.on("proxy", proxyPause)
+ function proxyPause (p) {
+ p.pause()
+ }
+
+
+ // replace the pipe method with a new version that will
+ // unlock the buffered stuff. if you just call .pipe()
+ // without a destination, then it'll re-play the events.
+ stream.pipe = (function (orig) { return function (dest) {
+ // console.error(" === open the pipes", dest && dest.path)
+
+ // let the entries flow through one at a time.
+ // Once they're all done, then we can resume completely.
+ var e = 0
+ ;(function unblockEntry () {
+ var entry = entryBuffer[e++]
+ // console.error(" ==== unblock entry", entry && entry.path)
+ if (!entry) return resume()
+ entry.on("end", unblockEntry)
+ if (dest) dest.add(entry)
+ else stream.emit("entry", entry)
+ })()
+
+ function resume () {
+ stream.removeListener("entry", saveEntry)
+ stream.removeListener("data", save)
+ stream.removeListener("end", save)
+
+ stream.pipe = orig
+ if (dest) stream.pipe(dest)
+
+ buf.forEach(function (b) {
+ if (b) stream.emit("data", b)
+ else stream.emit("end")
+ })
+
+ stream.resume()
+ }
+
+ return dest
+ }})(stream.pipe)
+}
diff --git a/deps/npm/node_modules/fstream/lib/dir-reader.js b/deps/npm/node_modules/fstream/lib/dir-reader.js
new file mode 100644
index 0000000000..a7806dcbb0
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/dir-reader.js
@@ -0,0 +1,180 @@
+// A thing that emits "entry" events with Reader objects
+// Pausing it causes it to stop emitting entry events, and also
+// pauses the current entry if there is one.
+
+module.exports = DirReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , Reader = fstream.Reader
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , path = require("path")
+ , Reader = require("./reader.js")
+
+inherits(DirReader, Reader)
+
+function DirReader (props) {
+ var me = this
+ if (!(me instanceof DirReader)) throw new Error(
+ "DirReader must be called as constructor.")
+
+ // should already be established as a Directory type
+ if (props.type !== "Directory" || !props.Directory) {
+ throw new Error("Non-directory type "+ props.type)
+ }
+
+ me._entries = null
+ me._index = -1
+ me._paused = false
+ me._length = -1
+
+ Reader.call(this, props)
+}
+
+DirReader.prototype._getEntries = function () {
+ var me = this
+ fs.readdir(me._path, function (er, entries) {
+ if (er) return me.error(er)
+ me._entries = entries
+ me._length = entries.length
+ if (typeof me.props.sort === "function") {
+ me._entries.sort(me.props.sort)
+ }
+ me._read()
+ })
+}
+
+// start walking the dir, and emit an "entry" event for each one.
+DirReader.prototype._read = function () {
+ var me = this
+
+ if (!me._entries) return me._getEntries()
+
+ if (me._paused || me._currentEntry || me._aborted) {
+ // console.error("DR paused=%j, current=%j, aborted=%j", me._paused, !!me._currentEntry, me._aborted)
+ return
+ }
+
+ me._index ++
+ if (me._index >= me._length) {
+ // console.error(" DR End/close", me._path)
+ me.emit("end")
+ me.emit("close")
+ return
+ }
+
+ // ok, handle this one, then.
+
+ // save creating a proxy, by stat'ing the thing now.
+ var p = path.resolve(me._path, me._entries[me._index])
+ // set this to prevent trying to _read() again in the stat time.
+ me._currentEntry = p
+ fs[ me.props.follow ? "stat" : "lstat" ](p, function (er, stat) {
+ if (er) return me.error(er)
+
+ var entry = Reader({ path: p
+ , depth: me.depth + 1
+ , root: me.root || me
+ , parent: me
+ , follow: me.follow
+ , filter: me.filter
+ }, stat)
+
+ // console.error("DR Entry", p, stat.size)
+
+ me._currentEntry = entry
+
+ // "entry" events are for direct entries in a specific dir.
+ // "child" events are for any and all children at all levels.
+ // This nomenclature is not completely final.
+
+ entry.on("pause", function (who) {
+ if (!me._paused) {
+ me.pause(who)
+ }
+ })
+
+ entry.on("resume", function (who) {
+ if (me._paused) {
+ me.resume(who)
+ }
+ })
+
+ entry.on("ready", function EMITCHILD () {
+ // console.error("DR emit child", entry._path)
+ if (me._paused) {
+ // console.error(" DR emit child - try again later")
+ // pause the child, and emit the "entry" event once we drain.
+ // console.error("DR pausing child entry")
+ entry.pause(me)
+ return me.once("resume", EMITCHILD)
+ }
+
+ me.emit("entry", entry)
+ me.emit("child", entry)
+ })
+
+ var ended = false
+ entry.on("close", onend)
+ function onend () {
+ if (ended) return
+ ended = true
+ me.emit("childEnd", entry)
+ me.emit("entryEnd", entry)
+ me._currentEntry = null
+ me._read()
+ }
+
+ // XXX Make this work in node.
+ // Long filenames should not break stuff.
+ entry.on("error", function (er) {
+ if (entry._swallowErrors) {
+ me.warn(er)
+ entry.emit("end")
+ entry.emit("close")
+ } else {
+ me.emit("error", er)
+ }
+ })
+
+ // proxy up some events.
+ ; [ "child"
+ , "childEnd"
+ , "warn"
+ ].forEach(function (ev) {
+ entry.on(ev, me.emit.bind(me, ev))
+ })
+ })
+}
+
+DirReader.prototype.pause = function (who) {
+ var me = this
+ if (me._paused) return
+ who = who || me
+ me._paused = true
+ if (me._currentEntry && me._currentEntry.pause) {
+ me._currentEntry.pause(who)
+ }
+ me.emit("pause", who)
+}
+
+DirReader.prototype.resume = function (who) {
+ var me = this
+ if (!me._paused) return
+ who = who || me
+
+ me._paused = false
+ // console.error("DR Emit Resume", me._path)
+ me.emit("resume", who)
+ if (me._paused) {
+ // console.error("DR Re-paused", me._path)
+ return
+ }
+
+ if (me._currentEntry) {
+ if (me._currentEntry.resume) {
+ me._currentEntry.resume(who)
+ }
+ } else me._read()
+}
diff --git a/deps/npm/node_modules/fstream/lib/dir-writer.js b/deps/npm/node_modules/fstream/lib/dir-writer.js
new file mode 100644
index 0000000000..01920244c1
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/dir-writer.js
@@ -0,0 +1,165 @@
+// It is expected that, when .add() returns false, the consumer
+// of the DirWriter will pause until a "drain" event occurs. Note
+// that this is *almost always going to be the case*, unless the
+// thing being written is some sort of unsupported type, and thus
+// skipped over.
+
+module.exports = DirWriter
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , Writer = require("./writer.js")
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , path = require("path")
+ , collect = require("./collect.js")
+
+inherits(DirWriter, Writer)
+
+function DirWriter (props) {
+ var me = this
+ if (!(me instanceof DirWriter)) me.error(
+ "DirWriter must be called as constructor.", null, true)
+
+ // should already be established as a Directory type
+ if (props.type !== "Directory" || !props.Directory) {
+ me.error("Non-directory type "+ props.type + " " +
+ JSON.stringify(props), null, true)
+ }
+
+ Writer.call(this, props)
+}
+
+DirWriter.prototype._create = function () {
+ var me = this
+ mkdir(me._path, Writer.dirmode, function (er) {
+ if (er) return me.error(er)
+ // ready to start getting entries!
+ me.ready = true
+ me.emit("ready")
+ })
+}
+
+// a DirWriter has an add(entry) method, but its .write() doesn't
+// do anything. Why a no-op rather than a throw? Because this
+// leaves open the door for writing directory metadata for
+// gnu/solaris style dumpdirs.
+DirWriter.prototype.write = function () {
+ return true
+}
+
+DirWriter.prototype.end = function () {
+ this._ended = true
+ this._process()
+}
+
+DirWriter.prototype.add = function (entry) {
+ var me = this
+
+ // console.error("\tadd", entry._path, "->", me._path)
+ collect(entry)
+ if (!me.ready || me._currentEntry) {
+ me._buffer.push(entry)
+ return false
+ }
+
+ // create a new writer, and pipe the incoming entry into it.
+ if (me._ended) {
+ return me.error("add after end")
+ }
+
+ me._buffer.push(entry)
+ me._process()
+
+ return 0 === this._buffer.length
+}
+
+DirWriter.prototype._process = function () {
+ var me = this
+
+ // console.error("DW Process p=%j", me._processing, me.basename)
+
+ if (me._processing) return
+
+ var entry = me._buffer.shift()
+ if (!entry) {
+ // console.error("DW Drain")
+ me.emit("drain")
+ if (me._ended) me._finish()
+ return
+ }
+
+ me._processing = true
+ // console.error("DW Entry", entry._path)
+
+ me.emit("entry", entry)
+
+ // ok, add this entry
+ //
+ // don't allow recursive copying
+ var p = entry
+ do {
+ if (p._path === me.root._path || p._path === me._path) {
+ // console.error("DW Exit (recursive)", entry.basename, me._path)
+ me._processing = false
+ if (entry._collected) entry.pipe()
+ return me._process()
+ }
+ } while (p = p.parent)
+
+ // console.error("DW not recursive")
+
+ // chop off the entry's root dir, replace with ours
+ var props = { parent: me
+ , root: me.root || me
+ , type: entry.type
+ , depth: me.depth + 1 }
+
+ var p = entry._path || entry.path || entry.props.path
+ if (entry.parent) {
+ p = p.substr(entry.parent._path.length + 1)
+ }
+ // get rid of any ../../ shenanigans
+ props.path = path.join(me.path, path.join("/", p))
+
+ // all the rest of the stuff, copy over from the source.
+ Object.keys(entry.props).forEach(function (k) {
+ if (!props.hasOwnProperty(k)) {
+ props[k] = entry.props[k]
+ }
+ })
+
+ // not sure at this point what kind of writer this is.
+ var child = me._currentChild = new Writer(props)
+ child.on("ready", function () {
+ // console.error("DW Child Ready", child.type, child._path)
+ // console.error(" resuming", entry._path)
+ entry.pipe(child)
+ entry.resume()
+ })
+
+ // XXX Make this work in node.
+ // Long filenames should not break stuff.
+ child.on("error", function (er) {
+ if (child._swallowErrors) {
+ me.warn(er)
+ child.emit("end")
+ child.emit("close")
+ } else {
+ me.emit("error", er)
+ }
+ })
+
+ // we fire _end internally *after* end, so that we don't move on
+ // until any "end" listeners have had their chance to do stuff.
+ child.on("close", onend)
+ var ended = false
+ function onend () {
+ if (ended) return
+ ended = true
+ // console.error("* DW Child end", child.basename)
+ me._currentChild = null
+ me._processing = false
+ me._process()
+ }
+}
diff --git a/deps/npm/node_modules/fstream/lib/file-reader.js b/deps/npm/node_modules/fstream/lib/file-reader.js
new file mode 100644
index 0000000000..b1f9861838
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/file-reader.js
@@ -0,0 +1,147 @@
+// Basically just a wrapper around an fs.ReadStream
+
+module.exports = FileReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , Reader = fstream.Reader
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , Reader = require("./reader.js")
+ , EOF = {EOF: true}
+ , CLOSE = {CLOSE: true}
+
+inherits(FileReader, Reader)
+
+function FileReader (props) {
+ // console.error(" FR create", props.path, props.size, new Error().stack)
+ var me = this
+ if (!(me instanceof FileReader)) throw new Error(
+ "FileReader must be called as constructor.")
+
+ // should already be established as a File type
+ // XXX Todo: preserve hardlinks by tracking dev+inode+nlink,
+ // with a HardLinkReader class.
+ if (!((props.type === "Link" && props.Link) ||
+ (props.type === "File" && props.File))) {
+ throw new Error("Non-file type "+ props.type)
+ }
+
+ me._buffer = []
+ me._bytesEmitted = 0
+ Reader.call(me, props)
+}
+
+FileReader.prototype._getStream = function () {
+ var me = this
+ , stream = me._stream = fs.createReadStream(me._path, me.props)
+
+ if (me.props.blksize) {
+ stream.bufferSize = me.props.blksize
+ }
+
+ stream.on("open", me.emit.bind(me, "open"))
+
+ stream.on("data", function (c) {
+ // console.error("\t\t%d %s", c.length, me.basename)
+ me._bytesEmitted += c.length
+ // no point saving empty chunks
+ if (!c.length) return
+ else if (me._paused || me._buffer.length) {
+ me._buffer.push(c)
+ me._read()
+ } else me.emit("data", c)
+ })
+
+ stream.on("end", function () {
+ if (me._paused || me._buffer.length) {
+ // console.error("FR Buffering End", me._path)
+ me._buffer.push(EOF)
+ me._read()
+ } else {
+ me.emit("end")
+ }
+
+ if (me._bytesEmitted !== me.props.size) {
+ me.error("Didn't get expected byte count\n"+
+ "expect: "+me.props.size + "\n" +
+ "actual: "+me._bytesEmitted)
+ }
+ })
+
+ stream.on("close", function () {
+ if (me._paused || me._buffer.length) {
+ // console.error("FR Buffering Close", me._path)
+ me._buffer.push(CLOSE)
+ me._read()
+ } else {
+ // console.error("FR close 1", me._path)
+ me.emit("close")
+ }
+ })
+
+ me._read()
+}
+
+FileReader.prototype._read = function () {
+ var me = this
+ // console.error("FR _read", me._path)
+ if (me._paused) {
+ // console.error("FR _read paused", me._path)
+ return
+ }
+
+ if (!me._stream) {
+ // console.error("FR _getStream calling", me._path)
+ return me._getStream()
+ }
+
+ // clear out the buffer, if there is one.
+ if (me._buffer.length) {
+ // console.error("FR _read has buffer", me._buffer.length, me._path)
+ var buf = me._buffer
+ for (var i = 0, l = buf.length; i < l; i ++) {
+ var c = buf[i]
+ if (c === EOF) {
+ // console.error("FR Read emitting buffered end", me._path)
+ me.emit("end")
+ } else if (c === CLOSE) {
+ // console.error("FR Read emitting buffered close", me._path)
+ me.emit("close")
+ } else {
+ // console.error("FR Read emitting buffered data", me._path)
+ me.emit("data", c)
+ }
+
+ if (me._paused) {
+ // console.error("FR Read Re-pausing at "+i, me._path)
+ me._buffer = buf.slice(i)
+ return
+ }
+ }
+ me._buffer.length = 0
+ }
+ // console.error("FR _read done")
+ // that's about all there is to it.
+}
+
+FileReader.prototype.pause = function (who) {
+ var me = this
+ // console.error("FR Pause", me._path)
+ if (me._paused) return
+ who = who || me
+ me._paused = true
+ if (me._stream) me._stream.pause()
+ me.emit("pause", who)
+}
+
+FileReader.prototype.resume = function (who) {
+ var me = this
+ // console.error("FR Resume", me._path)
+ if (!me._paused) return
+ who = who || me
+ me.emit("resume", who)
+ me._paused = false
+ if (me._stream) me._stream.resume()
+ me._read()
+}
diff --git a/deps/npm/node_modules/fstream/lib/file-writer.js b/deps/npm/node_modules/fstream/lib/file-writer.js
new file mode 100644
index 0000000000..4ed0c9c6ca
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/file-writer.js
@@ -0,0 +1,95 @@
+module.exports = FileWriter
+
+var fs = require("graceful-fs")
+ , mkdir = require("mkdirp")
+ , Writer = require("./writer.js")
+ , inherits = require("inherits")
+ , EOF = {}
+
+inherits(FileWriter, Writer)
+
+function FileWriter (props) {
+ var me = this
+ if (!(me instanceof FileWriter)) throw new Error(
+ "FileWriter must be called as constructor.")
+
+ // should already be established as a File type
+ if (props.type !== "File" || !props.File) {
+ throw new Error("Non-file type "+ props.type)
+ }
+
+ me._buffer = []
+ me._bytesWritten = 0
+
+ Writer.call(this, props)
+}
+
+FileWriter.prototype._create = function () {
+ var me = this
+ if (me._stream) return
+
+ var so = {}
+ if (me.props.flags) so.flags = me.props.flags
+ so.mode = Writer.filemode
+ if (me._old && me._old.blksize) so.bufferSize = me._old.blksize
+
+ me._stream = fs.createWriteStream(me._path, so)
+
+ me._stream.on("open", function (fd) {
+ me.ready = true
+ me._buffer.forEach(function (c) {
+ if (c === EOF) me._stream.end()
+ else me._stream.write(c)
+ })
+ me.emit("ready")
+ })
+
+ me._stream.on("drain", function () { me.emit("drain") })
+
+ me._stream.on("close", function () {
+ // console.error("\n\nFW Stream Close", me._path, me.size)
+ me._finish()
+ })
+}
+
+FileWriter.prototype.write = function (c) {
+ var me = this
+
+ me._bytesWritten += c.length
+
+ if (!me.ready) {
+ me._buffer.push(c)
+ return false
+ }
+
+ var ret = me._stream.write(c)
+ // console.error("\t-- fw wrote, _stream says", ret, me._stream._queue.length)
+
+ // allow 2 buffered writes, because otherwise there's just too
+ // much stop and go bs.
+ return ret || (me._stream._queue && me._stream._queue.length <= 2)
+}
+
+FileWriter.prototype.end = function (c) {
+ var me = this
+
+ if (c) me.write(c)
+
+ if (!me.ready) {
+ me._buffer.push(EOF)
+ return false
+ }
+
+ return me._stream.end()
+}
+
+FileWriter.prototype._finish = function () {
+ var me = this
+ if (typeof me.size === "number" && me._bytesWritten != me.size) {
+ me.error(
+ "Did not get expected byte count.\n" +
+ "expect: " + me.size + "\n" +
+ "actual: " + me._bytesWritten)
+ }
+ Writer.prototype._finish.call(me)
+}
diff --git a/deps/npm/node_modules/fstream/lib/get-type.js b/deps/npm/node_modules/fstream/lib/get-type.js
new file mode 100644
index 0000000000..cd65c41d8b
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/get-type.js
@@ -0,0 +1,32 @@
+module.exports = getType
+
+function getType (st) {
+ var types =
+ [ "Directory"
+ , "File"
+ , "SymbolicLink"
+ , "Link" // special for hardlinks from tarballs
+ , "BlockDevice"
+ , "CharacterDevice"
+ , "FIFO"
+ , "Socket" ]
+ , type
+
+ if (st.type && -1 !== types.indexOf(st.type)) {
+ st[st.type] = true
+ return st.type
+ }
+
+ for (var i = 0, l = types.length; i < l; i ++) {
+ type = types[i]
+ var is = st[type] || st["is" + type]
+ if (typeof is === "function") is = is.call(st)
+ if (is) {
+ st[type] = true
+ st.type = type
+ return type
+ }
+ }
+
+ return null
+}
diff --git a/deps/npm/node_modules/fstream/lib/link-reader.js b/deps/npm/node_modules/fstream/lib/link-reader.js
new file mode 100644
index 0000000000..7e7ab6ce5c
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/link-reader.js
@@ -0,0 +1,54 @@
+// Basically just a wrapper around an fs.readlink
+//
+// XXX: Enhance this to support the Link type, by keeping
+// a lookup table of {<dev+inode>:<path>}, so that hardlinks
+// can be preserved in tarballs.
+
+module.exports = LinkReader
+
+var fs = require("graceful-fs")
+ , fstream = require("../fstream.js")
+ , inherits = require("inherits")
+ , mkdir = require("mkdirp")
+ , Reader = require("./reader.js")
+
+inherits(LinkReader, Reader)
+
+function LinkReader (props) {
+ var me = this
+ if (!(me instanceof LinkReader)) throw new Error(
+ "LinkReader must be called as constructor.")
+
+ if (!((props.type === "Link" && props.Link) ||
+ (props.type === "SymbolicLink" && props.SymbolicLink))) {
+ throw new Error("Non-link type "+ props.type)
+ }
+
+ Reader.call(me, props)
+}
+
+// When piping a LinkReader into a LinkWriter, we have to
+// already have the linkpath property set, so that has to
+// happen *before* the "ready" event, which means we need to
+// override the _stat method.
+LinkReader.prototype._stat = function (currentStat) {
+ var me = this
+ fs.readlink(me._path, function (er, linkpath) {
+ if (er) return me.error(er)
+ me.linkpath = me.props.linkpath = linkpath
+ me.emit("linkpath", linkpath)
+ Reader.prototype._stat.call(me, currentStat)
+ })
+}
+
+LinkReader.prototype._read = function () {
+ var me = this
+ if (me._paused) return
+ // basically just a no-op, since we got all the info we need
+ // from the _stat method
+ if (!me._ended) {
+ me.emit("end")
+ me.emit("close")
+ me._ended = true
+ }
+}
diff --git a/deps/npm/node_modules/fstream/lib/link-writer.js b/deps/npm/node_modules/fstream/lib/link-writer.js
new file mode 100644
index 0000000000..0ccce15094
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/link-writer.js
@@ -0,0 +1,82 @@
+
+module.exports = LinkWriter
+
+var fs = require("graceful-fs")
+ , Writer = require("./writer.js")
+ , inherits = require("inherits")
+ , collect = require("./collect.js")
+ , path = require("path")
+ , rimraf = require("rimraf")
+
+inherits(LinkWriter, Writer)
+
+function LinkWriter (props) {
+ var me = this
+ if (!(me instanceof LinkWriter)) throw new Error(
+ "LinkWriter must be called as constructor.")
+
+ // should already be established as a Link type
+ if (!((props.type === "Link" && props.Link) ||
+ (props.type === "SymbolicLink" && props.SymbolicLink))) {
+ throw new Error("Non-link type "+ props.type)
+ }
+
+ if (!props.linkpath) {
+ me.error("Need linkpath property to create " + props.type)
+ }
+
+ Writer.call(this, props)
+}
+
+LinkWriter.prototype._create = function () {
+ var me = this
+ , hard = me.type === "Link" || process.platform === "win32"
+ , link = hard ? "link" : "symlink"
+ , lp = hard ? path.resolve(me.dirname, me.linkpath) : me.linkpath
+
+ // can only change the link path by clobbering
+ // For hard links, let's just assume that's always the case, since
+ // there's no good way to read them if we don't already know.
+ if (hard) return clobber(me, lp, link)
+
+ fs.readlink(me._path, function (er, p) {
+ // only skip creation if it's exactly the same link
+ if (p && p === lp) return finish(me)
+ clobber(me, lp, link)
+ })
+}
+
+function clobber (me, lp, link) {
+ rimraf(me._path, function (er) {
+ if (er) return me.error(er)
+ create(me, lp, link)
+ })
+}
+
+function create (me, lp, link) {
+ fs[link](lp, me._path, function (er) {
+ // if this is a hard link, and we're in the process of writing out a
+ // directory, it's very possible that the thing we're linking to
+ // doesn't exist yet (especially if it was intended as a symlink),
+ // so swallow ENOENT errors here and just soldier in.
+ if (er) {
+ if (er.code === "ENOENT" && process.platform === "win32") {
+ me.ready = true
+ me.emit("ready")
+ me.emit("end")
+ me.emit("close")
+ me.end = me._finish = function () {}
+ } else return me.error(er)
+ }
+ finish(me)
+ })
+}
+
+function finish (me) {
+ me.ready = true
+ me.emit("ready")
+}
+
+LinkWriter.prototype.end = function () {
+ this._finish()
+}
diff --git a/deps/npm/node_modules/fstream/lib/proxy-reader.js b/deps/npm/node_modules/fstream/lib/proxy-reader.js
new file mode 100644
index 0000000000..e9237d60b1
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/proxy-reader.js
@@ -0,0 +1,87 @@
+// A reader for when we don't yet know what kind of thing
+// the thing is.
+
+module.exports = ProxyReader
+
+var Reader = require("./reader.js")
+ , getType = require("./get-type.js")
+ , inherits = require("inherits")
+ , fs = require("graceful-fs")
+
+inherits(ProxyReader, Reader)
+
+function ProxyReader (props) {
+ var me = this
+ if (!(me instanceof ProxyReader)) throw new Error(
+ "ProxyReader must be called as constructor.")
+
+ me.props = props
+ me._buffer = []
+ me.ready = false
+
+ Reader.call(me, props)
+}
+
+ProxyReader.prototype._stat = function () {
+ var me = this
+ , props = me.props
+ // stat the thing to see what the proxy should be.
+ , stat = props.follow ? "stat" : "lstat"
+
+ fs[stat](props.path, function (er, current) {
+ var type
+ if (er || !current) {
+ type = "File"
+ } else {
+ type = getType(current)
+ }
+
+ props[type] = true
+ props.type = me.type = type
+
+ me._old = current
+ me._addProxy(Reader(props, current))
+ })
+}
+
+ProxyReader.prototype._addProxy = function (proxy) {
+ var me = this
+ if (me._proxy) {
+ return me.error("proxy already set")
+ }
+
+ me._proxy = proxy
+ ; [ "error"
+ , "close"
+ , "data"
+ , "end"
+ , "close"
+ , "linkpath"
+ , "entry"
+ , "warn"
+ ].forEach(function (ev) {
+ proxy.on(ev, me.emit.bind(me, ev))
+ })
+
+ me.emit("proxy", proxy)
+
+ proxy.on("ready", function () {
+ // console.error("~~ proxy is ready!", me.path)
+ me.ready = true
+ me.emit("ready")
+ })
+
+ var calls = me._buffer
+ me._buffer.length = 0
+ calls.forEach(function (c) {
+ proxy[c[0]].apply(proxy, c[1])
+ })
+}
+
+ProxyReader.prototype.pause = function () {
+ return this._proxy ? this._proxy.pause() : false
+}
+
+ProxyReader.prototype.resume = function (c) {
+ return this._proxy ? this._proxy.resume() : false
+}
diff --git a/deps/npm/node_modules/fstream/lib/proxy-writer.js b/deps/npm/node_modules/fstream/lib/proxy-writer.js
new file mode 100644
index 0000000000..2c78fc6736
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/proxy-writer.js
@@ -0,0 +1,109 @@
+// A writer for when we don't know what kind of thing
+// the thing is. That is, it's not explicitly set,
+// so we're going to make it whatever the thing already
+// is, or "File"
+//
+// Until then, collect all events.
+
+module.exports = ProxyWriter
+
+var Writer = require("./writer.js")
+ , getType = require("./get-type.js")
+ , inherits = require("inherits")
+ , collect = require("./collect.js")
+ , fs = require("fs")
+
+inherits(ProxyWriter, Writer)
+
+function ProxyWriter (props) {
+ var me = this
+ if (!(me instanceof ProxyWriter)) throw new Error(
+ "ProxyWriter must be called as constructor.")
+
+ me.props = props
+ me._needDrain = false
+
+ Writer.call(me, props)
+}
+
+ProxyWriter.prototype._stat = function () {
+ var me = this
+ , props = me.props
+ // stat the thing to see what the proxy should be.
+ , stat = props.follow ? "stat" : "lstat"
+
+ fs[stat](props.path, function (er, current) {
+ var type
+ if (er || !current) {
+ type = "File"
+ } else {
+ type = getType(current)
+ }
+
+ props[type] = true
+ props.type = me.type = type
+
+ me._old = current
+ me._addProxy(Writer(props, current))
+ })
+}
+
+ProxyWriter.prototype._addProxy = function (proxy) {
+ // console.error("~~ set proxy", this.path)
+ var me = this
+ if (me._proxy) {
+ return me.error("proxy already set")
+ }
+
+ me._proxy = proxy
+ ; [ "ready"
+ , "error"
+ , "close"
+ , "pipe"
+ , "drain"
+ , "warn"
+ ].forEach(function (ev) {
+ proxy.on(ev, me.emit.bind(me, ev))
+ })
+
+ me.emit("proxy", proxy)
+
+ var calls = me._buffer
+ calls.forEach(function (c) {
+ // console.error("~~ ~~ proxy buffered call", c[0], c[1])
+ proxy[c[0]].call(proxy, c[1])
+ })
+ me._buffer.length = 0
+ if (me._needsDrain) me.emit("drain")
+}
+
+ProxyWriter.prototype.add = function (entry) {
+ // console.error("~~ proxy add")
+ collect(entry)
+
+ if (!this._proxy) {
+ this._buffer.push(["add", [entry]])
+ this._needDrain = true
+ return false
+ }
+ return this._proxy.add(entry)
+}
+
+ProxyWriter.prototype.write = function (c) {
+ // console.error("~~ proxy write")
+ if (!this._proxy) {
+ this._buffer.push(["write", [c]])
+ this._needDrain = true
+ return false
+ }
+ return this._proxy.write(c)
+}
+
+ProxyWriter.prototype.end = function (c) {
+ // console.error("~~ proxy end")
+ if (!this._proxy) {
+ this._buffer.push(["end", c])
+ return false
+ }
+ return this._proxy.end(c)
+}
diff --git a/deps/npm/node_modules/fstream/lib/reader.js b/deps/npm/node_modules/fstream/lib/reader.js
new file mode 100644
index 0000000000..592ec6e854
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/reader.js
@@ -0,0 +1,234 @@
+
+module.exports = Reader
+
+var fs = require("graceful-fs")
+ , Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , path = require("path")
+ , getType = require("./get-type.js")
+ , hardLinks = Reader.hardLinks = {}
+ , Abstract = require("./abstract.js")
+
+// Must do this *before* loading the child classes
+inherits(Reader, Abstract)
+
+var DirReader = require("./dir-reader.js")
+ , FileReader = require("./file-reader.js")
+ , LinkReader = require("./link-reader.js")
+ , ProxyReader = require("./proxy-reader.js")
+
+function Reader (props, currentStat) {
+  var me = this
+  // Usable with or without `new`.
+  if (!(me instanceof Reader)) return new Reader(props, currentStat)
+
+  // Accept a bare path string as shorthand for { path: path }.
+  if (typeof props === "string") {
+    props = { path: props }
+  }
+
+  if (!props.path) {
+    me.error("Must provide a path", null, true)
+  }
+
+  // polymorphism.
+  // call fstream.Reader(dir) to get a DirReader object, etc.
+  // Note that, unlike in the Writer case, ProxyReader is going
+  // to be the *normal* state of affairs, since we rarely know
+  // the type of a file prior to reading it.
+
+  var type
+    , ClassType
+
+  if (props.type && typeof props.type === "function") {
+    // Caller supplied an explicit constructor to use.
+    type = props.type
+    ClassType = type
+  } else {
+    type = getType(props)
+    ClassType = Reader
+  }
+
+  // A pre-supplied stat can settle the type without touching the fs.
+  if (currentStat && !type) {
+    type = getType(currentStat)
+    props[type] = true
+    props.type = type
+  }
+
+  switch (type) {
+    case "Directory":
+      ClassType = DirReader
+      break
+
+    case "Link":
+      // XXX hard links are just files.
+      // However, it would be good to keep track of files' dev+inode
+      // and nlink values, and create a HardLinkReader that emits
+      // a linkpath value of the original copy, so that the tar
+      // writer can preserve them.
+      // ClassType = HardLinkReader
+      // break
+      // (intentional fall-through to "File")
+
+    case "File":
+      ClassType = FileReader
+      break
+
+    case "SymbolicLink":
+      ClassType = LinkReader
+      break
+
+    case null:
+      // Type unknown until we stat it: wrap in a proxy.
+      ClassType = ProxyReader
+      break
+  }
+
+  // Re-dispatch to the specific subclass if we aren't one already.
+  if (!(me instanceof ClassType)) {
+    return new ClassType(props)
+  }
+
+  Abstract.call(me)
+
+  me.readable = true
+  me.writable = false
+
+  me.type = type
+  me.props = props
+  me.depth = props.depth = props.depth || 0
+  me.parent = props.parent || null
+  me.root = props.root || (props.parent && props.parent.root) || me
+
+  // NOTE(review): basename/dirname read me.path *before* it is
+  // assigned two lines below, so they see undefined unless a prior
+  // construction pass already set it — confirm against callers.
+  me.basename = props.basename = path.basename(me.path)
+  me.dirname = props.dirname = path.dirname(me.path)
+  me._path = me.path = path.resolve(props.path)
+  if (process.platform === "win32") {
+    // "?" is illegal in windows paths; paths >= 260 chars need the
+    // \\?\ prefix to be accessed at all.
+    me.path = me._path = me.path.replace(/\?/g, "_")
+    if (me._path.length >= 260) {
+      // how DOES one create files on the moon?
+      // if the path has spaces in it, then UNC will fail.
+      me._swallowErrors = true
+      //if (me._path.indexOf(" ") === -1) {
+      me._path = "\\\\?\\" + me.path.replace(/\//g, "\\")
+      //}
+    }
+  }
+
+  // these have served their purpose, and are now just noisy clutter
+  props.parent = props.root = null
+
+  // console.error("\n\n\n%s setting size to", props.path, props.size)
+  me.size = props.size
+  me.filter = typeof props.filter === "function" ? props.filter : null
+  if (props.sort === "alpha") props.sort = alphasort
+
+  // start the ball rolling.
+  // this will stat the thing, and then call me._read()
+  // to start reading whatever it is.
+  // console.error("calling stat", props.path, currentStat)
+  me._stat(currentStat)
+}
+
+function alphasort (a, b) {
+ return a === b ? 0
+ : a.toLowerCase() > b.toLowerCase() ? 1
+ : a.toLowerCase() < b.toLowerCase() ? -1
+ : a > b ? 1
+ : -1
+}
+
+Reader.prototype._stat = function (currentStat) {
+  var me = this
+    , props = me.props
+    , stat = props.follow ? "stat" : "lstat"
+
+  // Use a cached stat when supplied, but stay async so "ready"
+  // listeners attached after construction still fire.
+  if (currentStat) process.nextTick(statCb.bind(null, null, currentStat))
+  else fs[stat](me._path, statCb)
+
+  function statCb (er, props_) {
+    if (er) return me.error(er)
+
+    // Merge the stat fields into our props.
+    Object.keys(props_).forEach(function (k) {
+      props[k] = props_[k]
+    })
+
+    // if it's not the expected size, then abort here.
+    if (undefined !== me.size && props.size !== me.size) {
+      return me.error("incorrect size")
+    }
+    me.size = props.size
+
+    var type = getType(props)
+    // special little thing for handling hardlinks.
+    // The first path seen for a dev:inode pair is recorded in
+    // hardLinks; any later path with the same pair is emitted as a
+    // Link pointing back at it.
+    if (type !== "Directory" && props.nlink && props.nlink > 1) {
+      var k = props.dev + ":" + props.ino
+      if (hardLinks[k] === me._path || !hardLinks[k]) hardLinks[k] = me._path
+      else {
+        // switch into hardlink mode.
+        type = me.type = me.props.type = "Link"
+        me.Link = me.props.Link = true
+        me.linkpath = me.props.linkpath = hardLinks[k]
+        // Setting __proto__ would arguably be the "correct"
+        // approach here, but that just seems too wrong.
+        me._stat = me._read = LinkReader.prototype._read
+      }
+    }
+
+    if (me.type && me.type !== type) {
+      me.error("Unexpected type: " + type)
+    }
+
+    // if the filter doesn't pass, then just skip over this one.
+    // still have to emit end so that dir-walking can move on.
+    if (me.filter) {
+      if (!me.filter()) {
+        me._aborted = true
+        me.emit("end")
+        me.emit("close")
+        return
+      }
+    }
+
+    me.emit("ready", props)
+
+    // if it's a directory, then we'll be emitting "entry" events.
+    me._read()
+  }
+}
+
+Reader.prototype.pipe = function (dest, opts) {
+ var me = this
+ if (typeof dest.add === "function") {
+ // piping to a multi-compatible, and we've got directory entries.
+ me.on("entry", function (entry) {
+ var ret = dest.add(entry)
+ if (false === ret) {
+ me.pause()
+ }
+ })
+ }
+
+ // console.error("R Pipe apply Stream Pipe")
+ return Stream.prototype.pipe.apply(this, arguments)
+}
+
+Reader.prototype.pause = function (who) {
+ this._paused = true
+ who = who || this
+ this.emit("pause", who)
+ if (this._stream) this._stream.pause(who)
+}
+
+Reader.prototype.resume = function (who) {
+ this._paused = false
+ who = who || this
+ this.emit("resume", who)
+ if (this._stream) this._stream.resume(who)
+ this._read()
+}
+
+Reader.prototype._read = function () {
+ me.warn("Cannot read unknown type: "+me.type)
+}
+
diff --git a/deps/npm/node_modules/fstream/lib/writer.js b/deps/npm/node_modules/fstream/lib/writer.js
new file mode 100644
index 0000000000..f280963aa1
--- /dev/null
+++ b/deps/npm/node_modules/fstream/lib/writer.js
@@ -0,0 +1,324 @@
+
+module.exports = Writer
+
+var fs = require("graceful-fs")
+ , inherits = require("inherits")
+ , rimraf = require("rimraf")
+ , mkdir = require("mkdirp")
+ , path = require("path")
+ , umask = process.umask()
+ , getType = require("./get-type.js")
+ , Abstract = require("./abstract.js")
+
+// Must do this *before* loading the child classes
+inherits(Writer, Abstract)
+
+Writer.dirmode = 0777 & (~umask)
+Writer.filemode = 0666 & (~umask)
+
+var DirWriter = require("./dir-writer.js")
+ , LinkWriter = require("./link-writer.js")
+ , FileWriter = require("./file-writer.js")
+ , ProxyWriter = require("./proxy-writer.js")
+
+// props is the desired state. current is optionally the current stat,
+// provided here so that subclasses can avoid statting the target
+// more than necessary.
+function Writer (props, current) {
+  var me = this
+
+  // Accept a bare path string as shorthand for { path: path }.
+  if (typeof props === "string") {
+    props = { path: props }
+  }
+
+  if (!props.path) me.error("Must provide a path", null, true)
+
+  // polymorphism.
+  // call fstream.Writer(dir) to get a DirWriter object, etc.
+  var type = getType(props)
+    , ClassType = Writer
+
+  switch (type) {
+    case "Directory":
+      ClassType = DirWriter
+      break
+    case "File":
+      ClassType = FileWriter
+      break
+    case "Link":
+    case "SymbolicLink":
+      ClassType = LinkWriter
+      break
+    case null:
+      // Don't know yet what type to create, so we wrap in a proxy.
+      ClassType = ProxyWriter
+      break
+  }
+
+  // Re-dispatch to the specific subclass if we aren't one already.
+  if (!(me instanceof ClassType)) return new ClassType(props)
+
+  // now get down to business.
+
+  Abstract.call(me)
+
+  // props is what we want to set.
+  // set some convenience properties as well.
+  me.type = props.type
+  me.props = props
+  me.depth = props.depth || 0
+  // clobber defaults to true unless explicitly set to false.
+  me.clobber = false === props.clobber ? props.clobber : true
+  me.parent = props.parent || null
+  me.root = props.root || (props.parent && props.parent.root) || me
+
+  me.basename = path.basename(props.path)
+  me.dirname = path.dirname(props.path)
+  me.linkpath = props.linkpath || null
+  me._path = me.path = path.resolve(props.path)
+  if (process.platform === "win32") {
+    // "?" is illegal in windows paths; paths >= 260 chars need the
+    // \\?\ prefix to be accessed at all.
+    me.path = me._path = me.path.replace(/\?/g, "_")
+    if (me._path.length >= 260) {
+      me._swallowErrors = true
+      //if (me._path.indexOf(" ") === -1) {
+      me._path = "\\\\?\\" + me.path.replace(/\//g, "\\")
+      //}
+    }
+  }
+
+  // parent/root have served their purpose; drop the references.
+  props.parent = props.root = null
+
+  // console.error("\n\n\n%s setting size to", props.path, props.size)
+  me.size = props.size
+
+  // Octal string modes (e.g. "0755") are converted to numbers.
+  if (typeof props.mode === "string") {
+    props.mode = parseInt(props.mode, 8)
+  }
+
+  me.readable = false
+  me.writable = true
+
+  // buffer until ready, or while handling another entry
+  me._buffer = []
+  me.ready = false
+
+  // start the ball rolling.
+  // this checks what's there already, and then calls
+  // me._create() to call the impl-specific creation stuff.
+  me._stat(current)
+}
+
+// Calling this means that it's something we can't create.
+// Just assert that it's already there, otherwise raise a warning.
+Writer.prototype._create = function () {
+  var me = this
+  fs[me.props.follow ? "stat" : "lstat"](me._path, function (er, current) {
+    if (er) {
+      // Not there and we can't make one: warn with ENOTSUP.
+      return me.warn("Cannot create " + me._path + "\n" +
+                     "Unsupported type: "+me.type, "ENOTSUP")
+    }
+    me._finish()
+  })
+}
+
+Writer.prototype._stat = function (current) {
+  var me = this
+    , props = me.props
+    , stat = props.follow ? "stat" : "lstat"
+
+  // Use the caller-supplied stat when available to save a syscall.
+  if (current) statCb(null, current)
+  else fs[stat](me._path, statCb)
+
+  function statCb (er, current) {
+    // if it's not there, great. We'll just create it.
+    // if it is there, then we'll need to change whatever differs
+    if (er || !current) {
+      return create(me)
+    }
+
+    me._old = current
+    var currentType = getType(current)
+
+    // if it's a type change, then we need to clobber or error.
+    // if it's not a type change, then let the impl take care of it.
+    // NOTE(review): this rimrafs on type change even when
+    // props.clobber === false — confirm that is intended.
+    if (currentType !== me.type) {
+      return rimraf(me._path, function (er) {
+        if (er) return me.error(er)
+        me._old = null
+        create(me)
+      })
+    }
+
+    // otherwise, just handle in the app-specific way
+    // this creates a fs.WriteStream, or mkdir's, or whatever
+    me._create()
+  }
+}
+
+// Ensure the parent directory exists, then let the subclass create
+// the target itself via me._create().
+function create (me) {
+  // XXX Need to clobber non-dirs that are in the way,
+  // unless { clobber: false } in the props.
+  mkdir(path.dirname(me._path), Writer.dirmode, function (er) {
+    if (er) return me.error(er)
+    me._create()
+  })
+}
+
+Writer.prototype._finish = function () {
+ var me = this
+
+ // console.error(" W Finish", me._path, me.size)
+
+ // set up all the things.
+ // At this point, we're already done writing whatever we've gotta write,
+ // adding files to the dir, etc.
+ var todo = 0
+ var errState = null
+ var done = false
+
+ if (me._old) {
+ // the times will almost *certainly* have changed.
+ // adds the utimes syscall, but remove another stat.
+ me._old.atime = new Date(0)
+ me._old.mtime = new Date(0)
+ // console.error(" W Finish Stale Stat", me._path, me.size)
+ setProps(me._old)
+ } else {
+ var stat = me.props.follow ? "stat" : "lstat"
+ // console.error(" W Finish Stating", me._path, me.size)
+ fs[stat](me._path, function (er, current) {
+ // console.error(" W Finish Stated", me._path, me.size, current)
+ if (er) {
+ // if we're in the process of writing out a
+ // directory, it's very possible that the thing we're linking to
+ // doesn't exist yet (especially if it was intended as a symlink),
+ // so swallow ENOENT errors here and just soldier in.
+ if (er.code === "ENOENT" &&
+ (me.type === "Link" || me.type === "SymbolicLink") &&
+ process.platform === "win32") {
+ me.ready = true
+ me.emit("ready")
+ me.emit("end")
+ me.emit("close")
+ me.end = me._finish = function () {}
+ return
+ } else return me.error(er)
+ }
+ setProps(me._old = current)
+ })
+ }
+
+ return
+
+ function setProps (current) {
+ // console.error(" W setprops", me._path)
+ // mode
+ var wantMode = me.props.mode
+ , chmod = me.props.follow || me.type === "Directory"
+ ? "chmod" : "lchmod"
+
+ if (fs[chmod] && typeof wantMode === "number") {
+ wantMode = wantMode & 0777
+ todo ++
+ // console.error(" W chmod", wantMode.toString(8), me.basename)
+ fs[chmod](me._path, wantMode, next(chmod))
+ }
+
+ // uid, gid
+ // Don't even try it unless root. Too easy to EPERM.
+ if (process.platform !== "win32" &&
+ process.getuid &&
+ process.getuid() === 0 &&
+ fs.chown &&
+ ( typeof me.props.uid === "number" ||
+ typeof me.props.gid === "number")) {
+ if (typeof me.props.uid !== "number") me.props.uid = current.uid
+ if (typeof me.props.gid !== "number") me.props.gid = current.gid
+ if (me.props.uid !== current.uid || me.props.gid !== current.gid) {
+ todo ++
+ // console.error(" W chown", me.props.uid, me.props.gid, me.basename)
+ fs.chown(me._path, me.props.uid, me.props.gid, next("chown"))
+ }
+ }
+
+ // atime, mtime.
+ if (fs.utimes &&
+ !(process.platform === "win32" && me.type === "Directory")) {
+ var utimes = (me.props.follow || me.type !== "SymbolicLink")
+ ? "utimes" : "lutimes"
+
+ if (utimes === "lutimes" && !fs[utimes]) {
+ if (!fs.futimes) fs.ltimes = function (a, b, c, cb) { return cb() }
+ else fs.lutimes = function (path, atime, mtime, cb) {
+ var c = require("constants")
+ fs.open(path, c.O_SYMLINK, function (er, fd) {
+ if (er) return cb(er)
+ fs.futimes(fd, atime, mtime, function (er) {
+ if (er) return cb(er)
+ fs.close(fd, cb)
+ })
+ })
+ }
+ }
+
+ var curA = current.atime
+ , curM = current.mtime
+ , meA = me.props.atime
+ , meM = me.props.mtime
+
+ if (meA === undefined) meA = curA
+ if (meM === undefined) meM = curM
+
+ if (!isDate(meA)) meA = new Date(meA)
+ if (!isDate(meM)) meA = new Date(meM)
+
+ if (meA.getTime() !== curA.getTime() ||
+ meM.getTime() !== curM.getTime()) {
+ todo ++
+ // console.error(" W utimes", meA, meM, me.basename)
+ fs[utimes](me._path, meA, meM, next("utimes"))
+ }
+ }
+
+ // finally, handle the case if there was nothing to do.
+ if (todo === 0) {
+ // console.error(" W nothing to do", me.basename)
+ next("nothing to do")()
+ }
+ }
+
+ function next (what) { return function (er) {
+ // console.error(" W Finish", what, todo)
+ if (errState) return
+ if (er) {
+ er.fstream_finish_call = what
+ return me.error(errState = er)
+ }
+ if (--todo > 0) return
+ if (done) return
+ done = true
+
+ // all the props have been set, so we're completely done.
+ me.emit("end")
+ me.emit("close")
+ }}
+}
+
+Writer.prototype.pipe = function () {
+ this.error("Can't pipe from writable stream")
+}
+
+Writer.prototype.add = function () {
+ this.error("Cannot add to non-Directory type")
+}
+
+Writer.prototype.write = function () {
+ return true
+}
+
+function objectToString (d) {
+ return Object.prototype.toString.call(d)
+}
+
+function isDate(d) {
+ return typeof d === 'object' && objectToString(d) === '[object Date]';
+}
+
diff --git a/deps/npm/node_modules/fstream/package.json b/deps/npm/node_modules/fstream/package.json
new file mode 100644
index 0000000000..22d5821f62
--- /dev/null
+++ b/deps/npm/node_modules/fstream/package.json
@@ -0,0 +1,21 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "fstream",
+ "description": "Advanced file system stream things",
+ "version": "0.0.1",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/fstream.git"
+ },
+ "main": "fstream.js",
+ "engines": {
+ "node": "0.5 || 0.6"
+ },
+ "dependencies": {
+ "rimraf": "~1.0.8",
+ "mkdirp": "~0.1.0",
+ "graceful-fs": "~1.0.1",
+ "inherits": "~1.0.0"
+ },
+ "devDependencies": {}
+}
diff --git a/deps/npm/node_modules/graceful-fs/LICENSE b/deps/npm/node_modules/graceful-fs/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/graceful-fs/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/graceful-fs/README.md b/deps/npm/node_modules/graceful-fs/README.md
new file mode 100644
index 0000000000..7d2e681e32
--- /dev/null
+++ b/deps/npm/node_modules/graceful-fs/README.md
@@ -0,0 +1,5 @@
+Just like node's `fs` module, but it does an incremental back-off when
+EMFILE is encountered.
+
+Useful in asynchronous situations where one needs to try to open lots
+and lots of files.
diff --git a/deps/npm/node_modules/graceful-fs/graceful-fs.js b/deps/npm/node_modules/graceful-fs/graceful-fs.js
new file mode 100644
index 0000000000..08e99b24b0
--- /dev/null
+++ b/deps/npm/node_modules/graceful-fs/graceful-fs.js
@@ -0,0 +1,39 @@
+// wrapper around the non-sync fs functions to gracefully handle
+// having too many file descriptors open. Note that this is
+// *only* possible because async patterns let one interject timeouts
+// and other cleverness anywhere in the process without disrupting
+// anything else.
+var fs = require("fs")
+  , timeout = 0  // ms of back-off; grows with consecutive EMFILEs
+
+// Re-export everything from fs.  Non-functions, capitalized names
+// (constructors), create* factories, and *Sync methods pass through
+// untouched; every other async function gets EMFILE retry wrapping.
+Object.keys(fs)
+  .forEach(function (i) {
+    exports[i] = (typeof fs[i] !== "function") ? fs[i]
+               : (i.match(/^[A-Z]|^create|Sync$/)) ? function () {
+                   return fs[i].apply(fs, arguments)
+                 }
+               : graceful(fs[i])
+  })
+
+// Windows builds without an lstat binding fall back to stat.
+if (process.platform === "win32"
+    && !process.binding("fs").lstat) {
+  exports.lstat = exports.stat
+  exports.lstatSync = exports.statSync
+}
+
+// Wrap an async fs function so that EMFILE failures are retried with
+// a linearly growing delay instead of surfacing to the caller.
+function graceful (fn) { return function GRACEFUL () {
+  var args = Array.prototype.slice.call(arguments)
+    , cb_ = args.pop()
+  args.push(cb)
+  function cb (er) {
+    // NOTE(review): matches on the error message text rather than
+    // er.code; brittle if node ever rewords the message — confirm.
+    if (er && er.message.match(/^EMFILE, Too many open files/)) {
+      setTimeout(function () {
+        GRACEFUL.apply(fs, args)
+      }, timeout ++)
+      return
+    }
+    // Success (or a different error): reset back-off and hand off.
+    timeout = 0
+    cb_.apply(null, arguments)
+  }
+  fn.apply(fs, args)
+}}
diff --git a/deps/npm/node_modules/graceful-fs/package.json b/deps/npm/node_modules/graceful-fs/package.json
new file mode 100644
index 0000000000..934829c5cc
--- /dev/null
+++ b/deps/npm/node_modules/graceful-fs/package.json
@@ -0,0 +1,16 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+ "name": "graceful-fs",
+ "description": "fs with incremental backoff on EMFILE",
+ "version": "1.0.1",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-graceful-fs.git"
+ },
+ "main": "graceful-fs.js",
+ "engines": {
+ "node": "0.4 || 0.5 || 0.6"
+ },
+ "dependencies": {},
+ "devDependencies": {}
+}
diff --git a/deps/npm/node_modules/inherits/LICENSE b/deps/npm/node_modules/inherits/LICENSE
new file mode 100644
index 0000000000..c78c4f6618
--- /dev/null
+++ b/deps/npm/node_modules/inherits/LICENSE
@@ -0,0 +1,26 @@
+Copyright 2011 Isaac Z. Schlueter (the "Author")
+All rights reserved.
+
+General Public Obviousness License
+
+The Author asserts that this software and associated documentation
+files (the "Software"), while the Author's original creation, is
+nonetheless obvious, trivial, unpatentable, and implied by the
+context in which the software was created. If you sat down and
+thought about the problem for an hour or less, you'd probably
+come up with exactly this solution.
+
+Permission is granted to use this software in any way
+whatsoever, with the following restriction:
+
+You may not release the Software under a more restrictive license
+than this one.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/inherits/README.md b/deps/npm/node_modules/inherits/README.md
new file mode 100644
index 0000000000..b2beaed93a
--- /dev/null
+++ b/deps/npm/node_modules/inherits/README.md
@@ -0,0 +1,51 @@
+A dead simple way to do inheritance in JS.
+
+ var inherits = require("inherits")
+
+ function Animal () {
+ this.alive = true
+ }
+ Animal.prototype.say = function (what) {
+ console.log(what)
+ }
+
+ inherits(Dog, Animal)
+ function Dog () {
+ Dog.super.apply(this)
+ }
+ Dog.prototype.sniff = function () {
+ this.say("sniff sniff")
+ }
+ Dog.prototype.bark = function () {
+ this.say("woof woof")
+ }
+
+ inherits(Chihuahua, Dog)
+ function Chihuahua () {
+ Chihuahua.super.apply(this)
+ }
+ Chihuahua.prototype.bark = function () {
+ this.say("yip yip")
+ }
+
+ // also works
+ function Cat () {
+ Cat.super.apply(this)
+ }
+ Cat.prototype.hiss = function () {
+ this.say("CHSKKSS!!")
+ }
+ inherits(Cat, Animal, {
+ meow: function () { this.say("miao miao") }
+ })
+ Cat.prototype.purr = function () {
+ this.say("purr purr")
+ }
+
+
+ var c = new Chihuahua
+ assert(c instanceof Chihuahua)
+ assert(c instanceof Dog)
+ assert(c instanceof Animal)
+
+The actual function is laughably small. 10-lines small.
diff --git a/deps/npm/node_modules/inherits/inherits-old.js b/deps/npm/node_modules/inherits/inherits-old.js
new file mode 100644
index 0000000000..ef39252dd1
--- /dev/null
+++ b/deps/npm/node_modules/inherits/inherits-old.js
@@ -0,0 +1,40 @@
+// This is a less perfect implementation of the inherits function,
+// designed to work in cases where ES5 is not available.
+//
+// Note that it is a bit longer, and doesn't properly deal with
+// getter/setters or property descriptor flags (enumerable, etc.)
+
+module.exports = inheritsOld
+
+function inheritsOld (c, p, proto) {
+  // Surrogate constructor: its prototype is the parent's, so
+  // `new F` yields an object inheriting from p.prototype.
+  function F () { this.constructor = c }
+  F.prototype = p.prototype
+  // Save the child's existing prototype members (plus any extras in
+  // `proto`) so they survive the prototype replacement below.
+  var e = {}
+  for (var i in c.prototype) if (c.prototype.hasOwnProperty(i)) {
+    e[i] = c.prototype[i]
+  }
+  if (proto) for (var i in proto) if (proto.hasOwnProperty(i)) {
+    e[i] = proto[i]
+  }
+  // Replace the prototype chain, then restore the saved members.
+  c.prototype = new F()
+  for (var i in e) if (e.hasOwnProperty(i)) {
+    c.prototype[i] = e[i]
+  }
+  // Expose the parent for Child.super.call(this) style chaining.
+  c.super = p
+}
+
+// function Child () {
+// Child.super.call(this)
+// console.error([this
+// ,this.constructor
+// ,this.constructor === Child
+// ,this.constructor.super === Parent
+// ,Object.getPrototypeOf(this) === Child.prototype
+// ,Object.getPrototypeOf(Object.getPrototypeOf(this))
+// === Parent.prototype
+// ,this instanceof Child
+// ,this instanceof Parent])
+// }
+// function Parent () {}
+// inheritsOld(Child, Parent)
+// new Child
diff --git a/deps/npm/node_modules/inherits/inherits.js b/deps/npm/node_modules/inherits/inherits.js
new file mode 100644
index 0000000000..061b396207
--- /dev/null
+++ b/deps/npm/node_modules/inherits/inherits.js
@@ -0,0 +1,29 @@
+module.exports = inherits
+
+function inherits (c, p, proto) {
+  proto = proto || {}
+  // Collect property descriptors from the child's current prototype
+  // and the optional extras, preserving getters/setters and flags.
+  var e = {}
+  ;[c.prototype, proto].forEach(function (s) {
+    Object.getOwnPropertyNames(s).forEach(function (k) {
+      e[k] = Object.getOwnPropertyDescriptor(s, k)
+    })
+  })
+  // Rebuild the prototype chain on top of the parent's prototype.
+  c.prototype = Object.create(p.prototype, e)
+  // Expose the parent for Child.super.call(this) style chaining.
+  c.super = p
+}
+
+//function Child () {
+// Child.super.call(this)
+// console.error([this
+// ,this.constructor
+// ,this.constructor === Child
+// ,this.constructor.super === Parent
+// ,Object.getPrototypeOf(this) === Child.prototype
+// ,Object.getPrototypeOf(Object.getPrototypeOf(this))
+// === Parent.prototype
+// ,this instanceof Child
+// ,this instanceof Parent])
+//}
+//function Parent () {}
+//inherits(Child, Parent)
+//new Child
diff --git a/deps/npm/node_modules/inherits/package.json b/deps/npm/node_modules/inherits/package.json
new file mode 100644
index 0000000000..7dc32771b9
--- /dev/null
+++ b/deps/npm/node_modules/inherits/package.json
@@ -0,0 +1,8 @@
+{ "name" : "inherits"
+, "description": "A tiny simple way to do classic inheritance in js"
+, "version" : "1.0.0"
+, "keywords" : ["inheritance", "class", "klass", "oop", "object-oriented"]
+, "main" : "./inherits.js"
+, "repository" : "https://github.com/isaacs/inherits"
+, "license": { "type": "GPOL", "url": "https://raw.github.com/isaacs/inherits/master/LICENSE" }
+, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)" }
diff --git a/deps/npm/node_modules/ini/LICENSE b/deps/npm/node_modules/ini/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/ini/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/ini/README.md b/deps/npm/node_modules/ini/README.md
new file mode 100644
index 0000000000..cd434b7ae7
--- /dev/null
+++ b/deps/npm/node_modules/ini/README.md
@@ -0,0 +1,71 @@
+An ini format parser and serializer for node.
+
+Sections are treated as nested objects. Items before the first heading
+are saved on the object directly.
+
+## Usage
+
+Consider an ini-file `config.ini` that looks like this:
+
+    ; this comment is being ignored
+ scope = global
+
+ [database]
+ user = dbuser
+ password = dbpassword
+ database = use_this_database
+
+ [paths.default]
+ datadir = /var/lib/data
+
+You can read, manipulate and write the ini-file like so:
+
+ var fs = require('fs')
+ , ini = require('ini')
+
+ var config = ini.parse(fs.readFileSync('./config.ini', 'utf-8'))
+
+ config.scope = 'local'
+ config.database.database = 'use_another_database'
+ config.paths.default.tmpdir = '/tmp'
+ delete config.paths.default.datadir
+
+ fs.writeFileSync('./config_modified.ini', ini.stringify(config, 'section'))
+
+This will result in a file called `config_modified.ini` being written to the filesystem with the following content:
+
+ [section]
+ scope = local
+ [section.database]
+ user = dbuser
+ password = dbpassword
+ database = use_another_database
+ [section.paths.default]
+ tmpdir = /tmp
+
+## API
+
+### decode(inistring)
+Decode the ini-style formatted `inistring` into a nested object.
+
+### parse(inistring)
+Alias for `decode(inistring)`
+
+### encode(object, [section])
+Encode the object `object` into an ini-style formatted string. If the optional parameter `section` is given, then all top-level properties of the object are put into this section and the `section`-string is prepended to all sub-sections, see the usage example above.
+
+### stringify(object, [section])
+Alias for `encode(object, [section])`
+
+### safe(val)
+Escapes the string `val` such that it is safe to be used as a key or value in an ini-file. Basically escapes quotes. For example
+
+ ini.safe('"unsafe string"')
+
+would result in
+
+ "\"unsafe string\""
+
+### unsafe(val)
+Unescapes the string `val`
+
diff --git a/deps/npm/node_modules/ini/ini.js b/deps/npm/node_modules/ini/ini.js
new file mode 100644
index 0000000000..468511a014
--- /dev/null
+++ b/deps/npm/node_modules/ini/ini.js
@@ -0,0 +1,97 @@
+
+exports.parse = exports.decode = decode
+exports.stringify = exports.encode = encode
+
+exports.safe = safe
+exports.unsafe = unsafe
+
+function encode (obj, section) {
+ var children = []
+ , out = ""
+
+ Object.keys(obj).forEach(function (k, _, __) {
+ var val = obj[k]
+ if (val && typeof val === "object") {
+ children.push(k)
+ } else {
+ out += safe(k) + " = " + safe(val) + "\n"
+ }
+ })
+
+ if (section && out.length) {
+ out = "[" + safe(section) + "]" + "\n" + out
+ }
+
+ children.forEach(function (k, _, __) {
+ out += encode(obj[k], (section ? section + "." : "") + k)
+ })
+
+ return out
+}
+
+function decode (str) {
+ var out = {}
+ , p = out
+ , section = null
+ , state = "START"
+ // section |key = value
+ , re = /^\[([^\]]*)\]$|^([^=]+)(=(.*))?$/i
+ , lines = str.split(/[\r\n]+/g)
+ , section = null
+
+ lines.forEach(function (line, _, __) {
+ //line = line
+ var rem = line.indexOf(";")
+ if (rem !== -1) line = line.substr(0, rem)//.trim()
+ if (!line) return
+ var match = line.match(re)
+ if (!match) return
+ if (match[1] !== undefined) {
+ section = unsafe(match[1])
+ p = out[section] = out[section] || {}
+ return
+ }
+ var key = unsafe(match[2])
+ , value = match[3] ? unsafe((match[4] || "")) : true
+ p[key] = value
+ })
+
+ // {a:{y:1},"a.b":{x:2}} --> {a:{y:1,b:{x:2}}}
+ // use a filter to return the keys that have to be deleted.
+ Object.keys(out).filter(function (k, _, __) {
+ if (!out[k] || typeof out[k] !== "object") return false
+ // see if the parent section is also an object.
+ // if so, add it to that, and mark this one for deletion
+ var parts = k.split(".")
+ , p = out
+ , l = parts.pop()
+ parts.forEach(function (part, _, __) {
+ if (!p[part] || typeof p[part] !== "object") p[part] = {}
+ p = p[part]
+ })
+ if (p === out) return false
+ p[l] = out[k]
+ return true
+ }).forEach(function (del, _, __) {
+ delete out[del]
+ })
+
+ return out
+}
+
+function safe (val) {
+ return ( typeof val !== "string"
+ || val.match(/[\r\n]/)
+ || (val.length > 1
+ && val.charAt(0) === "\""
+ && val.slice(-1) === "\"")
+ || val !== val.trim() ) ? JSON.stringify(val) : val
+}
+
+function unsafe (val) {
+ val = (val || "").trim()
+ if (val.charAt(0) === "\"" && val.slice(-1) === "\"") {
+ try { val = JSON.parse(val) } catch (_) {}
+ }
+ return val
+}
diff --git a/deps/npm/node_modules/ini/package.json b/deps/npm/node_modules/ini/package.json
new file mode 100644
index 0000000000..6bf81b8fbc
--- /dev/null
+++ b/deps/npm/node_modules/ini/package.json
@@ -0,0 +1,24 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "ini",
+ "description": "An ini encoder/decoder for node",
+ "version": "1.0.1",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/ini.git"
+ },
+ "main": "ini.js",
+ "scripts": {
+ "test": "node ini.js"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tap": "~0.0.9"
+ },
+ "scripts": {
+ "test": "tap test/*.js"
+ }
+}
diff --git a/deps/npm/node_modules/ini/test/fixtures/foo.ini b/deps/npm/node_modules/ini/test/fixtures/foo.ini
new file mode 100644
index 0000000000..aa4b177a01
--- /dev/null
+++ b/deps/npm/node_modules/ini/test/fixtures/foo.ini
@@ -0,0 +1,18 @@
+o = p
+
+ a with spaces = b c
+
+; wrap in quotes to JSON-decode and preserve spaces
+" xa n p " = "\"\r\nyoyoyo\r\r\n"
+
+; a section
+[a]
+av = a val
+e = { o: p, a: { av: a val, b: { c: { e: "this value" } } } }
+j = "{ o: "p", a: { av: "a val", b: { c: { e: "this value" } } } }"
+
+; nested child without middle parent
+; should create otherwise-empty a.b
+[a.b.c]
+e = 1
+j = 2
diff --git a/deps/npm/node_modules/ini/test/foo.js b/deps/npm/node_modules/ini/test/foo.js
new file mode 100644
index 0000000000..31af10320f
--- /dev/null
+++ b/deps/npm/node_modules/ini/test/foo.js
@@ -0,0 +1,40 @@
+var i = require("../")
+ , tap = require("tap")
+ , test = tap.test
+ , fs = require("fs")
+ , path = require("path")
+ , fixture = path.resolve(__dirname, "./fixtures/foo.ini")
+ , data = fs.readFileSync(fixture, "utf8")
+ , d
+ , expectE = 'o = p\n'
+ + 'a with spaces = b c\n'
+ + '" xa n p " = "\\"\\r\\nyoyoyo\\r\\r\\n"\n'
+ + '[a]\n'
+ + 'av = a val\n'
+ + 'e = { o: p, a: '
+ + '{ av: a val, b: { c: { e: "this value" '
+ + '} } } }\nj = "\\"{ o: \\"p\\", a: { av:'
+ + ' \\"a val\\", b: { c: { e: \\"this value'
+ + '\\" } } } }\\""\n[a.b.c]\ne = 1\nj = 2\n'
+ , expectD =
+ { o: 'p',
+ 'a with spaces': 'b c',
+ " xa n p ":'"\r\nyoyoyo\r\r\n',
+ a:
+ { av: 'a val',
+ e: '{ o: p, a: { av: a val, b: { c: { e: "this value" } } } }',
+ j: '"{ o: "p", a: { av: "a val", b: { c: { e: "this value" } } } }"',
+ b: { c: { e: '1', j: '2' } } }
+ }
+
+test("decode from file", function (t) {
+ d = i.decode(data)
+ t.deepEqual(d, expectD)
+ t.end()
+})
+
+test("encode from data", function (t) {
+ e = i.encode(expectD)
+ t.deepEqual(e, expectE)
+ t.end()
+})
diff --git a/deps/npm/node_modules/minimatch/.gitmodules b/deps/npm/node_modules/minimatch/.gitmodules
new file mode 100644
index 0000000000..7e29c80651
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "node_modules/lru-cache"]
+ path = node_modules/lru-cache
+ url = https://github.com/isaacs/node-lru-cache.git
diff --git a/deps/npm/node_modules/minimatch/LICENSE b/deps/npm/node_modules/minimatch/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/minimatch/README.md b/deps/npm/node_modules/minimatch/README.md
new file mode 100644
index 0000000000..f17ec599d5
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/README.md
@@ -0,0 +1,107 @@
+This is the matching library used internally by npm.
+
+Eventually, it will replace the C binding in node-glob.
+
+It works by converting glob expressions into JavaScript `RegExp`
+objects.
+
+## Usage
+
+```javascript
+var minimatch = require("minimatch")
+
+minimatch("bar.foo", "*.foo") // true!
+minimatch("bar.foo", "*.bar") // false!
+```
+
+## Features
+
+Supports all glob features.
+
+See:
+
+* `man sh`
+* `man fnmatch`
+* `man 5 gitignore`
+
+### Departures from zsh/bash/ksh/sh
+
+If the pattern starts with a `!` character, then it is negated.
+
+If a pattern starts with `#`, then it is treated as a comment, and
+will not match anything. (Use `\#` to match a literal `#` at the
+start of a line.)
+
+The double-star `**` is always supported, instead of requiring a special
+flag.
+
+If an escaped pattern has no matches, and the `null` flag is not set,
+then minimatch.match returns the pattern as-provided, rather than
+interpreting the character escapes. For example,
+`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
+`"*a?"`.
+
+## Functions
+
+### minimatch(path, pattern, options)
+
+Main export. Tests a path against
+the pattern using the options.
+
+### minimatch.filter(pattern, options)
+
+Returns a function that tests its
+supplied argument, suitable for use with `Array.filter`.
+
+### minimatch.match(list, pattern, options)
+
+Match against the list of
+files, in the style of fnmatch or glob. If nothing is matched, then
+return the pattern (unless `{ null: true }` in the options.)
+
+### minimatch.makeRe(pattern, options)
+
+Make a regular expression object
+from the pattern.
+
+## Options
+
+All options are `false` by default.
+
+### debug
+
+Dump a ton of stuff to stderr.
+
+### null
+
+Return an empty list from minimatch.match, instead of a list
+containing the pattern itself.
+
+### nocase
+
+Perform a case-insensitive match.
+
+### cache
+
+An LRU cache with `.get(k)` and `.set(k,v)` methods. By
+default, an instance of `node-lru-cache` is used, with 1000 max
+entries.
+
+### slash
+
+If set, then `a/*` will match `a/` as well as `a/b`.
+
+### matchBase
+
+If set, then patterns without slashes will be matched
+against the basename of the path if it contains slashes. For example,
+`a?b` would match `xyz/123/acb`.
+
+### partial
+
+Internal. Used by `minimatch.makeRe`.
+
+### dot
+
+Allow patterns to match paths starting with a period, even if
+the pattern does not explicitly start with a period.
diff --git a/deps/npm/node_modules/minimatch/minimatch.js b/deps/npm/node_modules/minimatch/minimatch.js
new file mode 100644
index 0000000000..76e7d6def0
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/minimatch.js
@@ -0,0 +1,399 @@
+// This is a JavaScript implementation of the fnmatch-like
+// stuff that git uses in its .gitignore files.
+// See `man 5 gitignore`.
+
+module.exports = minimatch
+
+var path = require("path")
+ , LRU = require("lru-cache")
+
+minimatch.filter = function (pattern, options) {
+ options = options || {}
+ return function (p, i, list) {
+ return minimatch(p, pattern, options)
+ }
+}
+
+minimatch.match = function (list, pattern, options) {
+ if (!options) options = {}
+ var ret = list.filter(minimatch.filter(pattern, options))
+
+ // set the null flag to allow empty match sets
+ // Note that minimatch itself, and filter(), do not
+ // respect this flag, only minimatch.match(list, pattern) does.
+ if (!options.null && !ret.length) {
+ return [pattern]
+ }
+
+ return ret
+}
+
+function minimatch (p, pattern, options) {
+ if (typeof pattern !== "string") {
+ throw new TypeError("glob pattern string required")
+ }
+
+ options = options || {}
+
+ // to set the cache, just replace with a different obj
+ // supporting set(k,v) and v=get(k) methods.
+ var cache = options.cache || minimatch.cache
+ if (!cache) cache = minimatch.cache = new LRU(1000)
+
+ // "" only matches ""
+ if (!pattern) return p === ""
+
+ // comments.
+ if (pattern.trim().charAt(0) === "#") return false
+
+ // check the cache
+ var re = cache.get(pattern)
+ if (!re && re !== false) {
+ cache.set(pattern, re = minimatch.makeRe(pattern, options))
+ }
+
+ if (options.debug) {
+ console.error(pattern + "\t" + re, JSON.stringify(p))
+ }
+
+ // some kind of invalid thing
+ if (!re) return false
+
+
+ // patterns that end in / can only match dirs
+ // however, dirs also match the same thing that *doesn't*
+ // end in a slash.
+ var match =
+ // a/ should not match a/*, but will match */
+ // accomplish this by not applying the regexp
+ // directly, unless the pattern would match
+ // trailing slash'ed things, or the thing isn't
+ // a trailing slash, or slashes are opted-in
+ ( ( options.slash ||
+ p.substr(-1) !== "/" ||
+ pattern.substr(-1) === "/" )
+ && !!p.match(re) )
+
+ // a/ should match * or a
+ || ( p.substr(-1) === "/" &&
+ !!p.slice(0, -1).match(re) )
+
+ // a pattern with *no* slashes will match against
+ // either the full path, or just the basename.
+ || ( options.matchBase &&
+ pattern.indexOf("/") === -1 &&
+ path.basename(p).match(re) )
+
+ //console.error(" MINIMATCH: %j %j %j %j",
+ // re.toString(), pattern, p, match)
+ return match
+}
+
+minimatch.makeRe = makeRe
+function makeRe (pattern, options) {
+ options = options || {}
+
+ var braceDepth = 0
+ , re = ""
+ , escaping = false
+ , oneStar = "[^\\/]*?"
+ , twoStar = ".*?"
+ , reSpecials = "().*{}+?[]^$/\\"
+ , patternListStack = []
+ , stateChar
+ , negate = false
+ , negating = false
+ , inClass = false
+ , reClassStart = []
+
+ for ( var i = 0, len = pattern.length, c
+ ; (i < len) && (c = pattern.charAt(i))
+ ; i ++ ) {
+
+ if (options.debug) {
+ console.error("%s\t%s %s %j", pattern, i, re, c)
+ }
+
+ switch (c) {
+ case "\\":
+ if (stateChar) {
+ if (stateChar === "*") re += oneStar
+ else re += "\\" + stateChar
+ stateChar = false
+ }
+ if (escaping) {
+ re += "\\\\" // must match literal \
+ escaping = false
+ } else {
+ escaping = true
+ }
+ continue
+
+ case "!":
+ if (i === 0 || negating) {
+ negate = !negate
+ negating = true
+ break
+ } else {
+ negating = false
+ }
+ // fallthrough
+ case "+":
+ case "@":
+ case "*":
+ case "?":
+ if (options.debug) {
+ console.error("%s\t%s %s %j <-- stateChar", pattern, i, re, c)
+ }
+
+ negating = false
+ if (escaping) {
+ re += "\\" + c
+ escaping = false
+ } else if (inClass) {
+ re += c
+ } else if (c === "*" && stateChar === "*") { // **
+ re += twoStar
+ stateChar = false
+ } else {
+ if (stateChar) {
+ if (stateChar === "*") re += oneStar
+ else if (stateChar === "?") re += "."
+ else re += "\\" + stateChar
+ }
+ stateChar = c
+ }
+ continue
+
+ case "(":
+ if (escaping) {
+ re += "\\("
+ escaping = false
+ } else if (inClass) {
+ re += "("
+ } else if (stateChar) {
+ plType = stateChar
+ patternListStack.push(plType)
+ re += stateChar === "!" ? "(?!" : "(?:"
+ stateChar = false
+ } else {
+ re += "\\("
+ }
+ continue
+
+ case ")":
+ if (escaping || inClass) {
+ re += "\\)"
+ escaping = false
+ } else if (patternListStack.length) {
+ re += ")"
+ plType = patternListStack.pop()
+ switch (plType) {
+ case "?":
+ case "+":
+ case "*": re += plType
+ case "!":
+ case "@": break
+ }
+ } else {
+ re += "\\)"
+ }
+ continue
+
+ case "|":
+ if (escaping || inClass) {
+ re += "\\|"
+ escaping = false
+ } else if (patternListStack.length) {
+ re += "|"
+ } else {
+ re += "\\|"
+ }
+ continue
+
+ // these are mostly the same in regexp and glob :)
+ case "[":
+ if (stateChar) {
+ // some state-tracking char was before the [
+ switch (stateChar) {
+ case "*":
+ re += oneStar
+ break
+ case "?":
+ re += "."
+ break
+ default:
+ re += "\\"+stateChar
+ break
+ }
+ stateChar = false
+ }
+
+ if (escaping || inClass) {
+ re += "\\" + c
+ escaping = false
+ } else {
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ }
+ continue
+
+ case "]":
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1) escaping = true
+
+ if (escaping || !inClass) {
+ re += "\\" + c
+ escaping = false
+ } else {
+ inClass = false
+ re += c
+ }
+ continue
+
+ case "{":
+ if (escaping || inClass) {
+ re += "\\{"
+ escaping = false
+ } else {
+ re += "(?:"
+ braceDepth ++
+ }
+ continue
+
+ case "}":
+ if (escaping || inClass || braceDepth === 0) {
+ re += "\\}"
+ escaping = false
+ } else {
+ re += ")"
+ braceDepth --
+ }
+ continue
+
+ case ",":
+ if (escaping || inClass || braceDepth === 0) {
+ re += ","
+ escaping = false
+ } else {
+ re += "|"
+ }
+ continue
+
+ default:
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case "*":
+ re += oneStar
+ break
+ case "?":
+ re += "."
+ break
+ default:
+ re += "\\"+stateChar
+ break
+ }
+ stateChar = false
+ }
+
+ if (escaping) {
+ // no need
+ escaping = false
+ } else if (reSpecials.indexOf(c) !== -1
+ && !(c === "^" && inClass)) {
+ re += "\\"
+ }
+
+ re += c
+
+ } // switch
+
+ if (negating && c !== "!") negating = false
+
+ } // for
+
+ // handle trailing things that only matter at the very end.
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case "*":
+ re += oneStar
+ break
+ case "?":
+ re += "."
+ break
+ default:
+ re += "\\"+stateChar
+ break
+ }
+ stateChar = false
+ } else if (escaping) {
+ re += "\\\\"
+ }
+
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ var cs = re.substr(reClassStart + 1)
+ , csOpts = Object.create(options)
+ csOpts.partial = true
+
+ re = re.substr(0, reClassStart) + "\\["
+ + makeRe(cs, csOpts)
+ }
+
+ if (options.partial) return re
+
+ // don't match "." files unless pattern starts with "."
+ if (!options.dot && pattern.charAt(0) !== ".") {
+ re = "(?!\\.)" + re
+ }
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = "^" + re + "$"
+
+ // fail on the pattern, but allow anything otherwise.
+ if (negate) re = "^(?!" + re + ").*$"
+
+ // really insane glob patterns can cause bad things.
+ var flags = ""
+ if (options.nocase) flags += "i"
+
+ if (options.debug) {
+ console.error("/%s/%s", re, flags)
+ }
+
+ try {
+ return new RegExp(re, flags)
+ } catch(ex) {
+ return false
+ }
+}
+
+if (require.main === module) {
+ // more tests in test/*.js
+ var tests = ["{a,b{c,d}}"
+ ,"a.*$?"
+ ,"\\{a,b{c,d}}"
+ ,"a/{c/,}d/{e/,f/{g,h,i}/}k"
+ ,"!*.bak"
+ ,"!!*.bak"
+ ,"!!!*.bak"
+ ,"\\a\\b\\c\\d"
+ ]
+ tests.forEach(function (t) {
+ console.log([t,makeRe(t)])
+ })
+}
diff --git a/deps/npm/node_modules/minimatch/node_modules/lru-cache/LICENSE b/deps/npm/node_modules/minimatch/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/node_modules/lru-cache/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/minimatch/node_modules/lru-cache/README.md b/deps/npm/node_modules/minimatch/node_modules/lru-cache/README.md
new file mode 100644
index 0000000000..1f5f155b7f
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/node_modules/lru-cache/README.md
@@ -0,0 +1,12 @@
+# lru cache
+
+A cache object that deletes the least-recently-used items.
+
+Usage:
+
+ var LRU = require("lru-cache")
+ , cache = LRU(10) // max 10 items. default = Infinity
+ cache.set("key", "value")
+ cache.get("key") // "value"
+
+RTFS for more info.
diff --git a/deps/npm/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js b/deps/npm/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js
new file mode 100644
index 0000000000..39b6c2d88b
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/node_modules/lru-cache/lib/lru-cache.js
@@ -0,0 +1,151 @@
+
+module.exports = LRUCache
+
+function hOP (obj, key) {
+ return Object.prototype.hasOwnProperty.call(obj, key)
+}
+
+function LRUCache (maxLength) {
+ if (!(this instanceof LRUCache)) {
+ return new LRUCache(maxLength)
+ }
+ var cache = {} // hash of items by key
+ , lruList = {} // list of items in order of use recency
+ , lru = 0 // least recently used
+ , mru = 0 // most recently used
+ , length = 0 // number of items in the list
+
+ // resize the cache when the maxLength changes.
+ Object.defineProperty(this, "maxLength",
+ { set : function (mL) {
+ if (!mL || !(typeof mL === "number") || mL <= 0 ) mL = Infinity
+ maxLength = mL
+ // if it gets above double maxLength, trim right away.
+ // otherwise, do it whenever it's convenient.
+ if (length > maxLength) trim()
+ }
+ , get : function () { return maxLength }
+ , enumerable : true
+ })
+ this.maxLength = maxLength
+ Object.defineProperty(this, "length",
+ { get : function () { return length }
+ , enumerable : true
+ })
+
+ this.set = function (key, value) {
+ if (hOP(cache, key)) {
+ this.get(key)
+ cache[key].value = value
+ return undefined
+ }
+ var hit = {key:key, value:value, lu:mru++}
+ lruList[hit.lu] = cache[key] = hit
+ length ++
+ if (length > maxLength) trim()
+ }
+ this.get = function (key) {
+ if (!hOP(cache, key)) return undefined
+ var hit = cache[key]
+ delete lruList[hit.lu]
+ if (hit.lu === lru) lruWalk()
+ hit.lu = mru ++
+ lruList[hit.lu] = hit
+ return hit.value
+ }
+ this.del = function (key) {
+ if (!hOP(cache, key)) return undefined
+ var hit = cache[key]
+ delete cache[key]
+ delete lruList[hit.lu]
+ if (hit.lu === lru) lruWalk()
+ length --
+ }
+ function lruWalk () {
+ // lru has been deleted, hop up to the next hit.
+ lru = Object.keys(lruList).shift()
+ }
+ function trim () {
+ if (length <= maxLength) return undefined
+ var prune = Object.keys(lruList).slice(0, length - maxLength)
+ for (var i = 0, l = (length - maxLength); i < l; i ++) {
+ delete cache[ lruList[prune[i]].key ]
+ delete lruList[prune[i]]
+ }
+ length = maxLength
+ lruWalk()
+ }
+}
+
+if (!process || !module || module !== process.mainModule) return undefined
+
+var l = LRUCache(3)
+ , assert = require("assert")
+
+l.set(1, 1)
+l.set(2, 1)
+l.set(3, 1)
+l.set(4, 1)
+l.set(5, 1)
+l.set(6, 1)
+
+assert.equal(l.get(1), undefined)
+assert.equal(l.get(2), undefined)
+assert.equal(l.get(3), undefined)
+assert.equal(l.get(4), 1)
+assert.equal(l.get(5), 1)
+assert.equal(l.get(6), 1)
+
+// now keep re-getting the 6 so it remains the most recently used.
+// in this case, we'll end up with 6, 10, 11, so the ending length = 3
+l.set(7, 1)
+l.get(6)
+l.set(8, 1)
+l.get(6)
+l.set(9, 1)
+l.get(6)
+l.set(10, 1)
+l.get(6)
+l.set(11, 1)
+assert.equal(l.length, 3)
+assert.equal(l.get(4), undefined)
+assert.equal(l.get(5), undefined)
+assert.equal(l.get(6), 1)
+assert.equal(l.get(7), undefined)
+assert.equal(l.get(8), undefined)
+assert.equal(l.get(9), undefined)
+assert.equal(l.get(10), 1)
+assert.equal(l.get(11), 1)
+
+// test changing the maxLength, verify that the LRU items get dropped.
+l.maxLength = 100
+for (var i = 0; i < 100; i ++) l.set(i, i)
+assert.equal(l.length, 100)
+for (var i = 0; i < 100; i ++) {
+ assert.equal(l.get(i), i)
+}
+l.maxLength = 3
+assert.equal(l.length, 3)
+for (var i = 0; i < 97; i ++) {
+ assert.equal(l.get(i), undefined)
+}
+for (var i = 98; i < 100; i ++) {
+ assert.equal(l.get(i), i)
+}
+
+// now remove the maxLength restriction, and try again.
+l.maxLength = "hello"
+for (var i = 0; i < 100; i ++) l.set(i, i)
+assert.equal(l.length, 100)
+for (var i = 0; i < 100; i ++) {
+ assert.equal(l.get(i), i)
+}
+// should trigger an immediate resize
+l.maxLength = 3
+assert.equal(l.length, 3)
+for (var i = 0; i < 97; i ++) {
+ assert.equal(l.get(i), undefined)
+}
+for (var i = 98; i < 100; i ++) {
+ assert.equal(l.get(i), i)
+}
diff --git a/deps/npm/node_modules/minimatch/node_modules/lru-cache/package.json b/deps/npm/node_modules/minimatch/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000..289f839d31
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/node_modules/lru-cache/package.json
@@ -0,0 +1,13 @@
+{ "name" : "lru-cache"
+, "description" : "A cache object that deletes the least-recently-used items."
+, "version" : "1.0.4"
+, "author" : "Isaac Z. Schlueter <i@izs.me>"
+, "scripts" : { "test" : "node lib/lru-cache.js" }
+, "main" : "lib/lru-cache"
+, "repository" : "git://github.com/isaacs/node-lru-cache.git"
+, "licenses" :
+ [ { "type" : "MIT"
+ , "url" : "http://github.com/isaacs/node-lru-cache/raw/master/LICENSE"
+ }
+ ]
+}
diff --git a/deps/npm/node_modules/minimatch/package.json b/deps/npm/node_modules/minimatch/package.json
new file mode 100644
index 0000000000..b83bebb91b
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/package.json
@@ -0,0 +1,32 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+ "name": "minimatch",
+ "description": "a glob matcher in javascript",
+ "version": "0.0.4",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/minimatch.git"
+ },
+ "main": "minimatch.js",
+ "scripts": {
+ "test": "tap test"
+ },
+ "engines": {
+ "node": "*"
+ },
+ "bundleDependencies": [
+ "lru-cache"
+ ],
+ "dependencies": {
+ "lru-cache": "~1.0.2"
+ },
+ "devDependencies": {
+ "tap": "~0.0.5"
+ },
+ "licenses" : [
+ {
+ "type" : "MIT",
+ "url" : "http://github.com/isaacs/minimatch/raw/master/LICENSE"
+ }
+ ]
+}
diff --git a/deps/npm/node_modules/minimatch/test/basic.js b/deps/npm/node_modules/minimatch/test/basic.js
new file mode 100644
index 0000000000..e4c5437e1c
--- /dev/null
+++ b/deps/npm/node_modules/minimatch/test/basic.js
@@ -0,0 +1,141 @@
+// http://www.bashcookbook.com/bashinfo/source/bash-1.14.7/tests/glob-test
+
+var tap = require("tap")
+ , mm = require("../")
+ , files = [ "a", "b", "c", "d", "abc"
+ , "abd", "abe", "bb", "bcd"
+ , "ca", "cb", "dd", "de"
+ , "bdir/", "bdir/cfile"]
+ , next = files.concat([ "a-b", "aXb"
+ , ".x", ".y" ])
+
+tap.test("basic tests", function (t) {
+ // [ pattern, [matches], MM opts, files, TAP opts]
+ ; [ "http://www.bashcookbook.com/bashinfo" +
+ "/source/bash-1.14.7/tests/glob-test"
+ , ["a*", ["a", "abc", "abd", "abe"]]
+ , ["X*", ["X*"]]
+ // allow null glob expansion
+ , ["X*", [], { null: true }]
+
+ // isaacs: Slightly different than bash/sh/ksh
+ // \\* is not un-escaped to literal "*" in a failed match,
+ // but it does make it get treated as a literal star
+ , ["\\*", ["\\*"]]
+ , ["\\**", ["\\**"]]
+
+ , ["b*/", ["bdir/"]]
+ , ["c*", ["c", "ca", "cb"]]
+ , ["**", files]
+
+
+ , ["\\.\\./*/", ["\\.\\./*/"]]
+ , ["s/\\..*//", ["s/\\..*//"]]
+
+ // legendary larry crashes bashes
+ , ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\\1/"
+ , ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\\1/"]]
+ , ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\1/"
+ , ["/^root:/{s/^[^:]*:[^:]*:\([^:]*\).*$/\1/"]]
+
+ // character classes
+ , ["[a-c]b*", ["abc", "abd", "abe", "bb", "cb"]]
+ , ["[a-y]*[^c]", ["abd", "abe", "bb", "bcd",
+ "bdir/", "ca", "cb", "dd", "de"]]
+ , ["a*[^c]", ["abd", "abe"]]
+ , function () { files.push("a-b", "aXb") }
+ , ["a[X-]b", ["a-b", "aXb"]]
+ , function () { files.push(".x", ".y") }
+ , ["[^a-c]*", ["d", "dd", "de"]]
+ , function () { files.push("a*b/", "a*b/ooo") }
+ , ["a\\*b/*", ["a*b/ooo"]]
+ , ["a\\*?/*", ["a*b/ooo"]]
+ , ["*\\\\!*", [], {null: true}, ["echo !7"]]
+ , ["*\\!*", ["echo !7"], null, ["echo !7"]]
+ , ["*.\\*", ["r.*"], null, ["r.*"]]
+ , ["a[b]c", ["abc"]]
+ , ["a[\\b]c", ["abc"]]
+ , ["a?c", ["abc"]]
+ , ["a\\*c", [], {null: true}, ["abc"]]
+ , ["", [""], { null: true }, [""]]
+
+ , "http://www.opensource.apple.com/source/bash/bash-23/" +
+ "bash/tests/glob-test"
+ , function () { files.push("man/", "man/man1/", "man/man1/bash.1") }
+ , ["*/man*/bash.*", ["man/man1/bash.1"]]
+ , ["man/man1/bash.1", ["man/man1/bash.1"]]
+ , ["a***c", ["abc"], null, ["abc"]]
+ , ["a*****?c", ["abc"], null, ["abc"]]
+ , ["?*****??", ["abc"], null, ["abc"]]
+ , ["*****??", ["abc"], null, ["abc"]]
+ , ["?*****?c", ["abc"], null, ["abc"]]
+ , ["?***?****c", ["abc"], null, ["abc"]]
+ , ["?***?****?", ["abc"], null, ["abc"]]
+ , ["?***?****", ["abc"], null, ["abc"]]
+ , ["*******c", ["abc"], null, ["abc"]]
+ , ["*******?", ["abc"], null, ["abc"]]
+ , ["a*cd**?**??k", ["abcdecdhjk"], null, ["abcdecdhjk"]]
+ , ["a**?**cd**?**??k", ["abcdecdhjk"], null, ["abcdecdhjk"]]
+ , ["a**?**cd**?**??k***", ["abcdecdhjk"], null, ["abcdecdhjk"]]
+ , ["a**?**cd**?**??***k", ["abcdecdhjk"], null, ["abcdecdhjk"]]
+ , ["a**?**cd**?**??***k**", ["abcdecdhjk"], null, ["abcdecdhjk"]]
+ , ["a****c**?**??*****", ["abcdecdhjk"], null, ["abcdecdhjk"]]
+ , ["[-abc]", ["-"], null, ["-"]]
+ , ["[abc-]", ["-"], null, ["-"]]
+ , ["\\", ["\\"], null, ["\\"]]
+ , ["[\\\\]", ["\\"], null, ["\\"]]
+ , ["[[]", ["["], null, ["["]]
+ , ["[", ["["], null, ["["]]
+ , ["[*", ["[abc"], null, ["[abc"]]
+ , "a right bracket shall lose its special meaning and " +
+ "represent itself in a bracket expression if it occurs " +
+ "first in the list. -- POSIX.2 2.8.3.2"
+ , ["[]]", ["]"], null, ["]"]]
+ , ["[]-]", ["]"], null, ["]"]]
+ , ["[a-\z]", ["p"], null, ["p"]]
+ , ["[/\\\\]*", ["/tmp"], null, ["/tmp"]]
+ , ["??**********?****?", [], { null: true }, ["abc"]]
+ , ["??**********?****c", [], { null: true }, ["abc"]]
+ , ["?************c****?****", [], { null: true }, ["abc"]]
+ , ["*c*?**", [], { null: true }, ["abc"]]
+ , ["a*****c*?**", [], { null: true }, ["abc"]]
+ , ["a********???*******", [], { null: true }, ["abc"]]
+ , ["[]", [], { null: true }, ["a"]]
+ , ["[abc", [], { null: true }, ["["]]
+
+ , "nocase tests"
+ , ["XYZ", ["xYz"], { nocase: true, null: true }
+ , ["xYz", "ABC", "IjK"]]
+ , ["ab*", ["ABC"], { nocase: true, null: true }
+ , ["xYz", "ABC", "IjK"]]
+ , ["[ia]?[ck]", ["ABC", "IjK"], { nocase: true, null: true }
+ , ["xYz", "ABC", "IjK"]]
+
+ ].forEach(function (c) {
+ if (typeof c === "function") return c()
+ if (typeof c === "string") return t.comment(c)
+
+ var pattern = c[0]
+ , expect = c[1].sort(alpha)
+ , options = c[2] || {}
+ , f = c[3] || files
+ , tapOpts = c[4] || {}
+
+ // options.debug = true
+ var r = mm.makeRe(pattern, options)
+ tapOpts.re = String(r) || JSON.stringify(r)
+ tapOpts.files = JSON.stringify(f)
+ tapOpts.pattern = pattern
+
+ var actual = mm.match(f, pattern, options)
+
+ t.equivalent( actual, expect
+ , JSON.stringify(pattern) + " " + JSON.stringify(expect)
+ , c[4] )
+ })
+ t.end()
+})
+
+function alpha (a, b) {
+ return a > b ? 1 : -1
+}
diff --git a/deps/npm/node_modules/mkdirp/LICENSE b/deps/npm/node_modules/mkdirp/LICENSE
new file mode 100644
index 0000000000..432d1aeb01
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/LICENSE
@@ -0,0 +1,21 @@
+Copyright 2010 James Halliday (mail@substack.net)
+
+This project is free software released under the MIT/X11 license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/deps/npm/node_modules/mkdirp/README.markdown b/deps/npm/node_modules/mkdirp/README.markdown
new file mode 100644
index 0000000000..0393c4ea53
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/README.markdown
@@ -0,0 +1,21 @@
+mkdirp
+======
+
+Like `mkdir -p`, but in node.js!
+
+Example
+=======
+
+pow.js
+------
+ var mkdirp = require('mkdirp');
+
+ mkdirp('/tmp/foo/bar/baz', 0755, function (err) {
+ if (err) console.error(err)
+ else console.log('pow!')
+ });
+
+Output
+ pow!
+
+And now /tmp/foo/bar/baz exists, huzzah!
diff --git a/deps/npm/node_modules/mkdirp/examples/pow.js b/deps/npm/node_modules/mkdirp/examples/pow.js
new file mode 100644
index 0000000000..7741462212
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/examples/pow.js
@@ -0,0 +1,6 @@
+var mkdirp = require('mkdirp');
+
+mkdirp('/tmp/foo/bar/baz', 0755, function (err) {
+ if (err) console.error(err)
+ else console.log('pow!')
+});
diff --git a/deps/npm/node_modules/mkdirp/index.js b/deps/npm/node_modules/mkdirp/index.js
new file mode 100644
index 0000000000..6602801463
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/index.js
@@ -0,0 +1,36 @@
+var path = require('path');
+var fs = require('fs');
+
+module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP;
+
+function mkdirP (p, mode, f) {
+ var cb = f || function () {};
+ if (typeof mode === 'string') mode = parseInt(mode, 8);
+ p = path.resolve(p);
+
+ fs.mkdir(p, mode, function (er) {
+ if (!er) return cb();
+ switch (er.code) {
+ case 'ENOENT':
+ mkdirP(path.dirname(p), mode, function (er) {
+ if (er) cb(er);
+ else mkdirP(p, mode, cb);
+ });
+ break;
+
+ case 'EEXIST':
+ fs.stat(p, function (er2, stat) {
+ // if the stat fails, then that's super weird.
+ // let the original EEXIST be the failure reason.
+ if (er2 || !stat.isDirectory()) cb(er)
+ else if ((stat.mode & 0777) !== mode) fs.chmod(p, mode, cb);
+ else cb();
+ });
+ break;
+
+ default:
+ cb(er);
+ break;
+ }
+ });
+}
diff --git a/deps/npm/node_modules/mkdirp/package.json b/deps/npm/node_modules/mkdirp/package.json
new file mode 100644
index 0000000000..99149f747d
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/package.json
@@ -0,0 +1,23 @@
+{
+ "name" : "mkdirp",
+ "description" : "Recursively mkdir, like `mkdir -p`",
+ "version" : "0.1.0",
+ "author" : "James Halliday <mail@substack.net> (http://substack.net)",
+ "main" : "./index",
+ "keywords" : [
+ "mkdir",
+ "directory"
+ ],
+ "repository" : {
+ "type" : "git",
+ "url" : "http://github.com/substack/node-mkdirp.git"
+ },
+ "scripts" : {
+ "test" : "tap test/*.js"
+ },
+ "devDependencies" : {
+ "tap" : "0.0.x"
+ },
+ "license" : "MIT/X11",
+ "engines": { "node": "*" }
+}
diff --git a/deps/npm/node_modules/mkdirp/test/chmod.js b/deps/npm/node_modules/mkdirp/test/chmod.js
new file mode 100644
index 0000000000..0609694e38
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/test/chmod.js
@@ -0,0 +1,39 @@
+var mkdirp = require('../').mkdirp;
+var path = require('path');
+var fs = require('fs');
+var test = require('tap').test;
+
+var ps = [ '', 'tmp' ];
+
+for (var i = 0; i < 25; i++) {
+ var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ ps.push(dir);
+}
+
+var file = ps.join('/');
+
+test('chmod-pre', function (t) {
+ var mode = 0744
+ mkdirp(file, mode, function (er) {
+ t.ifError(er, 'should not error');
+ fs.stat(file, function (er, stat) {
+ t.ifError(er, 'should exist');
+ t.ok(stat && stat.isDirectory(), 'should be directory');
+ t.equal(stat && stat.mode & 0777, mode, 'should be 0744');
+ t.end();
+ });
+ });
+});
+
+test('chmod', function (t) {
+ var mode = 0755
+ mkdirp(file, mode, function (er) {
+ t.ifError(er, 'should not error');
+ fs.stat(file, function (er, stat) {
+ t.ifError(er, 'should exist');
+ t.ok(stat && stat.isDirectory(), 'should be directory');
+ t.equal(stat && stat.mode & 0777, mode, 'should be 0755');
+ t.end();
+ });
+ });
+});
diff --git a/deps/npm/node_modules/mkdirp/test/clobber.js b/deps/npm/node_modules/mkdirp/test/clobber.js
new file mode 100644
index 0000000000..0eb7099870
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/test/clobber.js
@@ -0,0 +1,37 @@
+var mkdirp = require('../').mkdirp;
+var path = require('path');
+var fs = require('fs');
+var test = require('tap').test;
+
+var ps = [ '', 'tmp' ];
+
+for (var i = 0; i < 25; i++) {
+ var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ ps.push(dir);
+}
+
+var file = ps.join('/');
+
+// a file in the way
+var itw = ps.slice(0, 3).join('/');
+
+
+test('clobber-pre', function (t) {
+ console.error("about to write to "+itw)
+ fs.writeFileSync(itw, 'I AM IN THE WAY, THE TRUTH, AND THE LIGHT.');
+
+ fs.stat(itw, function (er, stat) {
+ t.ifError(er)
+ t.ok(stat && stat.isFile(), 'should be file')
+ t.end()
+ })
+})
+
+test('clobber', function (t) {
+ t.plan(2);
+ mkdirp(file, 0755, function (err) {
+ t.ok(err);
+ t.equal(err.code, 'ENOTDIR');
+ t.end();
+ });
+});
diff --git a/deps/npm/node_modules/mkdirp/test/mkdirp.js b/deps/npm/node_modules/mkdirp/test/mkdirp.js
new file mode 100644
index 0000000000..b07cd70c10
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/test/mkdirp.js
@@ -0,0 +1,28 @@
+var mkdirp = require('../');
+var path = require('path');
+var fs = require('fs');
+var test = require('tap').test;
+
+test('woo', function (t) {
+ t.plan(2);
+ var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+
+ var file = '/tmp/' + [x,y,z].join('/');
+
+ mkdirp(file, 0755, function (err) {
+ if (err) t.fail(err);
+ else path.exists(file, function (ex) {
+ if (!ex) t.fail('file not created')
+ else fs.stat(file, function (err, stat) {
+ if (err) t.fail(err)
+ else {
+ t.equal(stat.mode & 0777, 0755);
+ t.ok(stat.isDirectory(), 'target not a directory');
+ t.end();
+ }
+ })
+ })
+ });
+});
diff --git a/deps/npm/node_modules/mkdirp/test/race.js b/deps/npm/node_modules/mkdirp/test/race.js
new file mode 100644
index 0000000000..96a0447636
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/test/race.js
@@ -0,0 +1,41 @@
+var mkdirp = require('../').mkdirp;
+var path = require('path');
+var fs = require('fs');
+var test = require('tap').test;
+
+test('race', function (t) {
+ t.plan(4);
+ var ps = [ '', 'tmp' ];
+
+ for (var i = 0; i < 25; i++) {
+ var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ ps.push(dir);
+ }
+ var file = ps.join('/');
+
+ var res = 2;
+ mk(file, function () {
+ if (--res === 0) t.end();
+ });
+
+ mk(file, function () {
+ if (--res === 0) t.end();
+ });
+
+ function mk (file, cb) {
+ mkdirp(file, 0755, function (err) {
+ if (err) t.fail(err);
+ else path.exists(file, function (ex) {
+ if (!ex) t.fail('file not created')
+ else fs.stat(file, function (err, stat) {
+ if (err) t.fail(err)
+ else {
+ t.equal(stat.mode & 0777, 0755);
+ t.ok(stat.isDirectory(), 'target not a directory');
+ if (cb) cb();
+ }
+ })
+ })
+ });
+ }
+});
diff --git a/deps/npm/node_modules/mkdirp/test/rel.js b/deps/npm/node_modules/mkdirp/test/rel.js
new file mode 100644
index 0000000000..79858243ab
--- /dev/null
+++ b/deps/npm/node_modules/mkdirp/test/rel.js
@@ -0,0 +1,32 @@
+var mkdirp = require('../');
+var path = require('path');
+var fs = require('fs');
+var test = require('tap').test;
+
+test('rel', function (t) {
+ t.plan(2);
+ var x = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ var y = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+ var z = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
+
+ var cwd = process.cwd();
+ process.chdir('/tmp');
+
+ var file = [x,y,z].join('/');
+
+ mkdirp(file, 0755, function (err) {
+ if (err) t.fail(err);
+ else path.exists(file, function (ex) {
+ if (!ex) t.fail('file not created')
+ else fs.stat(file, function (err, stat) {
+ if (err) t.fail(err)
+ else {
+ process.chdir(cwd);
+ t.equal(stat.mode & 0777, 0755);
+ t.ok(stat.isDirectory(), 'target not a directory');
+ t.end();
+ }
+ })
+ })
+ });
+});
diff --git a/deps/npm/node_modules/node-uuid/LICENSE.md b/deps/npm/node_modules/node-uuid/LICENSE.md
new file mode 100644
index 0000000000..bcdddf9a05
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/LICENSE.md
@@ -0,0 +1,3 @@
+Copyright (c) 2010 Robert Kieffer
+
+Dual licensed under the [MIT](http://en.wikipedia.org/wiki/MIT_License) and [GPL](http://en.wikipedia.org/wiki/GNU_General_Public_License) licenses.
diff --git a/deps/npm/node_modules/node-uuid/README.md b/deps/npm/node_modules/node-uuid/README.md
new file mode 100644
index 0000000000..c00675066f
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/README.md
@@ -0,0 +1,100 @@
+# node-uuid
+
+Simple, fast generation of RFC4122[RFC4122(v4)](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. It runs in node.js and all major browsers.
+
+## Installation
+
+ npm install node-uuid
+
+### In browser
+
+ <script src="uuid.js"></script>
+
+### In node.js
+
+ var uuid = require('node-uuid');
+
+## Usage
+
+### Generate a String UUID
+
+ var id = uuid(); // -> '92329D39-6F5C-4520-ABFC-AAB64544E172'
+
+### Generate a Binary UUID
+
+ // Simple form - allocates a Buffer/Array for you
+ var buf = uuid('binary');
+ // node.js -> <Buffer 08 50 05 c8 9c b2 4c 07 ac 07 d1 4f b9 f5 04 51>
+ // browser -> [8, 80, 5, 200, 156, 178, 76, 7, 172, 7, 209, 79, 185, 245, 4, 81]
+
+ // Provide your own Buffer or Array
+ var buf = new Array(16);
+ uuid('binary', buf); // -> [8, 80, 5, 200, 156, 178, 76, 7, 172, 7, 209, 79, 185, 245, 4, 81]
+ var buf = new Buffer(16);
+ uuid('binary', buf); // -> <Buffer 08 50 05 c8 9c b2 4c 07 ac 07 d1 4f b9 f5 04 51>
+
+ // Provide your own Buffer/Array, plus specify offset
+ // (e.g. here we fill an array with 3 uuids)
+ var buf = new Buffer(16 \* 3);
+ uuid('binary', id, 0);
+ uuid('binary', id, 16);
+ uuid('binary', id, 32);
+
+## Testing
+
+test/test.js generates performance data (similar to test/benchmark.js). It also verifies the syntax of 100K string UUIDs, and logs the distribution of hex digits found therein. For example:
+
+ - - - Performance Data - - -
+ uuid(): 1052631 uuids/second
+ uuid('binary'): 680272 uuids/second
+ uuid('binary', buffer): 2702702 uuids/second
+
+ - - - Distribution of Hex Digits (% deviation from ideal) - - -
+ 0 |================================| 187705 (0.11%)
+ 1 |================================| 187880 (0.2%)
+ 2 |================================| 186875 (-0.33%)
+ 3 |================================| 186847 (-0.35%)
+ 4 |==================================================| 287433 (-0.02%)
+ 5 |================================| 187910 (0.22%)
+ 6 |================================| 188172 (0.36%)
+ 7 |================================| 187350 (-0.08%)
+ 8 |====================================| 211994 (-0.24%)
+ 9 |====================================| 212664 (0.08%)
+ A |=====================================| 213185 (0.32%)
+ B |=====================================| 212877 (0.18%)
+ C |================================| 187445 (-0.03%)
+ D |================================| 186737 (-0.41%)
+ E |================================| 187155 (-0.18%)
+ F |================================| 187771 (0.14%)
+
+Note that the increased values for 4 and 8-B are expected as part of the RFC4122 syntax (and are accounted for in the deviation calculation). BTW, if someone wants to do the calculation to determine what a statistically significant deviation would be, I'll gladly add that to the test.
+
+### In browser
+
+ Open test/test.html
+
+### In node.js
+
+ > node test/test.js
+
+node.js users can also run the node-uuid .vs. uuid.js benchmark:
+
+ > node test/benchmark.js
+
+## Performance
+
+### In node.js
+
+node-uuid is designed to be fast. That said, the target platform is node.js, where it is screaming fast. Here's what I get on my 2.66GHz Macbook Pro for the test/benchmark.js script:
+
+ nodeuuid(): 1126126 uuids/second
+ nodeuuid('binary'): 782472 uuids/second
+ nodeuuid('binary', buffer): 2688172 uuids/second
+ uuidjs(): 620347 uuids/second
+ uuidjs('binary'): 1275510 uuids/second
+
+The uuidjs() entries are for Nikhil Marathe's [uuidjs module](https://bitbucket.org/nikhilm/uuidjs), and are provided for comparison. uuidjs is a wrapper around the native libuuid library.
+
+### In browser
+
+node-uuid performance varies dramatically across browsers. For comprehensive test results, please [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance).
diff --git a/deps/npm/node_modules/node-uuid/package.json b/deps/npm/node_modules/node-uuid/package.json
new file mode 100644
index 0000000000..bf70062abf
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/package.json
@@ -0,0 +1,12 @@
+{
+ "name" : "node-uuid",
+ "description" : "Simple, fast generation of RFC4122(v4) UUIDs.",
+ "url" : "http://github.com/broofa/node-uuid",
+ "keywords" : ["uuid", "guid", "rfc4122"],
+ "author" : "Robert Kieffer <robert@broofa.com>",
+ "contributors" : [],
+ "dependencies" : [],
+ "lib" : ".",
+ "main" : "./uuid.js",
+ "version" : "1.2.0"
+}
diff --git a/deps/npm/node_modules/node-uuid/test/benchmark-native.c b/deps/npm/node_modules/node-uuid/test/benchmark-native.c
new file mode 100644
index 0000000000..dbfc75f6d7
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/test/benchmark-native.c
@@ -0,0 +1,34 @@
+/*
+Test performance of native C UUID generation
+
+To Compile: cc -luuid benchmark-native.c -o benchmark-native
+*/
+
+#include <stdio.h>
+#include <unistd.h>
+#include <sys/time.h>
+#include <uuid/uuid.h>
+
+int main() {
+ uuid_t myid;
+ char buf[36+1];
+ int i;
+ struct timeval t;
+ double start, finish;
+
+ gettimeofday(&t, NULL);
+ start = t.tv_sec + t.tv_usec/1e6;
+
+ int n = 2e5;
+ for (i = 0; i < n; i++) {
+ uuid_generate(myid);
+ uuid_unparse(myid, buf);
+ }
+
+ gettimeofday(&t, NULL);
+ finish = t.tv_sec + t.tv_usec/1e6;
+ double dur = finish - start;
+
+ printf("%d uuids/sec", (int)(n/dur));
+ return 0;
+}
diff --git a/deps/npm/node_modules/node-uuid/test/benchmark.js b/deps/npm/node_modules/node-uuid/test/benchmark.js
new file mode 100644
index 0000000000..2505dc4a21
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/test/benchmark.js
@@ -0,0 +1,27 @@
+var nodeuuid = require('../uuid'),
+ uuidjs = require('uuid').generate,
+ N = 5e5;
+
+function rate(msg, t) {
+ console.log(msg + ': ' +
+ (N / (Date.now() - t) * 1e3 | 0) +
+ ' uuids/second');
+}
+
+// node-uuid - string form
+for (var i = 0, t = Date.now(); i < N; i++) nodeuuid();
+rate('nodeuuid()', t);
+
+for (var i = 0, t = Date.now(); i < N; i++) nodeuuid('binary');
+rate('nodeuuid(\'binary\')', t);
+
+var buffer = new nodeuuid.BufferClass(16);
+for (var i = 0, t = Date.now(); i < N; i++) nodeuuid('binary', buffer);
+rate('nodeuuid(\'binary\', buffer)', t);
+
+// node-uuid - string form
+for (var i = 0, t = Date.now(); i < N; i++) uuidjs();
+rate('uuidjs()', t);
+
+for (var i = 0, t = Date.now(); i < N; i++) uuidjs('binary');
+rate('uuidjs(\'binary\')', t);
diff --git a/deps/npm/node_modules/node-uuid/test/test.html b/deps/npm/node_modules/node-uuid/test/test.html
new file mode 100644
index 0000000000..89e0f2c44b
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/test/test.html
@@ -0,0 +1,14 @@
+<html>
+ <head>
+ <style>
+ div {
+ font-family: monospace;
+ font-size: 8pt;
+ }
+ </style>
+ <script src="../uuid.js"></script>
+ </head>
+ <body>
+ <script src="./test.js"></script>
+ </body>
+</html>
diff --git a/deps/npm/node_modules/node-uuid/test/test.js b/deps/npm/node_modules/node-uuid/test/test.js
new file mode 100644
index 0000000000..5037566eac
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/test/test.js
@@ -0,0 +1,83 @@
+if (typeof(uuid) == 'undefined') {
+ uuid = require('../uuid');
+}
+
+var UUID_FORMAT = /[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89a-fAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}/;
+var N = 1e5;
+
+function log(msg) {
+ if (typeof(document) != 'undefined') {
+ document.write('<div>' + msg + '</div>');
+ }
+ if (typeof(console) != 'undefined') {
+ console.log(msg);
+ }
+}
+
+function rate(msg, t) {
+ log(msg + ': ' + (N / (Date.now() - t) * 1e3 | 0) + ' uuids/second');
+}
+
+// Perf tests
+log('- - - Performance Data - - -');
+for (var i = 0, t = Date.now(); i < N; i++) uuid();
+rate('uuid()', t);
+for (var i = 0, t = Date.now(); i < N; i++) uuid('binary');
+rate('uuid(\'binary\')', t);
+var buf = new uuid.BufferClass(16);
+for (var i = 0, t = Date.now(); i < N; i++) uuid('binary', buf);
+rate('uuid(\'binary\', buffer)', t);
+
+var counts = {}, max = 0;
+
+var b = new uuid.BufferClass(16);
+for (var i = 0; i < N; i++) {
+ id = uuid();
+ if (!UUID_FORMAT.test(id)) {
+ throw Error(id + ' is not a valid UUID string');
+ }
+
+ if (id != uuid.unparse(uuid.parse(id))) {
+ throw Error(id + ' does not parse/unparse');
+ }
+
+ // Count digits for our randomness check
+ var digits = id.replace(/-/g, '').split('');
+ for (var j = digits.length-1; j >= 0; j--) {
+ var c = digits[j];
+ max = Math.max(max, counts[c] = (counts[c] || 0) + 1);
+ }
+}
+
+// Get %'age an actual value differs from the ideal value
+function divergence(actual, ideal) {
+ return Math.round(100*100*(actual - ideal)/ideal)/100;
+}
+
+log('<br />- - - Distribution of Hex Digits (% deviation from ideal) - - -');
+
+// Check randomness
+for (var i = 0; i < 16; i++) {
+ var c = i.toString(16);
+ var bar = '', n = counts[c], p = Math.round(n/max*100|0);
+
+ // 1-3,5-8, and D-F: 1:16 odds over 30 digits
+ var ideal = N*30/16;
+ if (i == 4) {
+ // 4: 1:1 odds on 1 digit, plus 1:16 odds on 30 digits
+ ideal = N*(1 + 30/16);
+ } else if (i >= 8 && i <= 11) {
+ // 8-B: 1:4 odds on 1 digit, plus 1:16 odds on 30 digits
+ ideal = N*(1/4 + 30/16);
+ } else {
+ // Otherwise: 1:16 odds on 30 digits
+ ideal = N*30/16;
+ }
+ var d = divergence(n, ideal);
+
+ // Draw bar using UTF squares (just for grins)
+ var s = n/max*50 | 0;
+ while (s--) bar += '=';
+
+ log(c + ' |' + bar + '| ' + counts[c] + ' (' + d + '%)');
+}
diff --git a/deps/npm/node_modules/node-uuid/uuid.js b/deps/npm/node_modules/node-uuid/uuid.js
new file mode 100644
index 0000000000..fdf6c54fd4
--- /dev/null
+++ b/deps/npm/node_modules/node-uuid/uuid.js
@@ -0,0 +1,80 @@
+(function() {
+ /*
+ * Generate a RFC4122(v4) UUID
+ *
+ * Documentation at https://github.com/broofa/node-uuid
+ */
+
+ // Use node.js Buffer class if available, otherwise use the Array class
+ var BufferClass = typeof(Buffer) == 'function' ? Buffer : Array;
+
+ // Buffer used for generating string uuids
+ var _buf = new BufferClass(16);
+
+ // Cache number <-> hex string for octet values
+ var toString = [];
+ var toNumber = {};
+ for (var i = 0; i < 256; i++) {
+ toString[i] = (i + 0x100).toString(16).substr(1);
+ toNumber[toString[i]] = i;
+ }
+
+ function parse(s) {
+ var buf = new BufferClass(16);
+ var i = 0, ton = toNumber;
+ s.toLowerCase().replace(/[0-9a-f][0-9a-f]/g, function(octet) {
+ buf[i++] = toNumber[octet];
+ });
+ return buf;
+ }
+
+ function unparse(buf) {
+ var tos = toString, b = buf;
+ return tos[b[0]] + tos[b[1]] + tos[b[2]] + tos[b[3]] + '-' +
+ tos[b[4]] + tos[b[5]] + '-' +
+ tos[b[6]] + tos[b[7]] + '-' +
+ tos[b[8]] + tos[b[9]] + '-' +
+ tos[b[10]] + tos[b[11]] + tos[b[12]] +
+ tos[b[13]] + tos[b[14]] + tos[b[15]];
+ }
+
+ var b32 = 0x100000000, ff = 0xff;
+ function uuid(fmt, buf, offset) {
+ var b = fmt != 'binary' ? _buf : (buf ? buf : new BufferClass(16));
+ var i = buf && offset || 0;
+
+ var r = Math.random()*b32;
+ b[i++] = r & ff;
+ b[i++] = r>>>8 & ff;
+ b[i++] = r>>>16 & ff;
+ b[i++] = r>>>24 & ff;
+ r = Math.random()*b32;
+ b[i++] = r & ff;
+ b[i++] = r>>>8 & ff;
+ b[i++] = r>>>16 & 0x0f | 0x40; // See RFC4122 sect. 4.1.3
+ b[i++] = r>>>24 & ff;
+ r = Math.random()*b32;
+ b[i++] = r & 0x3f | 0x80; // See RFC4122 sect. 4.4
+ b[i++] = r>>>8 & ff;
+ b[i++] = r>>>16 & ff;
+ b[i++] = r>>>24 & ff;
+ r = Math.random()*b32;
+ b[i++] = r & ff;
+ b[i++] = r>>>8 & ff;
+ b[i++] = r>>>16 & ff;
+ b[i++] = r>>>24 & ff;
+
+ return fmt === undefined ? unparse(b) : b;
+ };
+
+ uuid.parse = parse;
+ uuid.unparse = unparse;
+ uuid.BufferClass = BufferClass;
+
+ if (typeof(module) != 'undefined') {
+ module.exports = uuid;
+ } else {
+ // In browser? Set as top-level function
+ this.uuid = uuid;
+ }
+})();
diff --git a/deps/npm/node_modules/nopt/.gitignore b/deps/npm/node_modules/nopt/.gitignore
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/deps/npm/node_modules/nopt/.gitignore
diff --git a/deps/npm/node_modules/nopt/LICENSE b/deps/npm/node_modules/nopt/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/nopt/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/nopt/README.md b/deps/npm/node_modules/nopt/README.md
new file mode 100644
index 0000000000..eeddfd4fe1
--- /dev/null
+++ b/deps/npm/node_modules/nopt/README.md
@@ -0,0 +1,208 @@
+If you want to write an option parser, and have it be good, there are
+two ways to do it. The Right Way, and the Wrong Way.
+
+The Wrong Way is to sit down and write an option parser. We've all done
+that.
+
+The Right Way is to write some complex configurable program with so many
+options that you go half-insane just trying to manage them all, and put
+it off with duct-tape solutions until you see exactly to the core of the
+problem, and finally snap and write an awesome option parser.
+
+If you want to write an option parser, don't write an option parser.
+Write a package manager, or a source control system, or a service
+restarter, or an operating system. You probably won't end up with a
+good one of those, but if you don't give up, and you are relentless and
+diligent enough in your procrastination, you may just end up with a very
+nice option parser.
+
+## USAGE
+
+ // my-program.js
+ var nopt = require("nopt")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , knownOpts = { "foo" : [String, null]
+ , "bar" : [Stream, Number]
+ , "baz" : path
+ , "bloo" : [ "big", "medium", "small" ]
+ , "flag" : Boolean
+ , "pick" : Boolean
+ , "many" : [String, Array]
+ }
+ , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
+ , "b7" : ["--bar", "7"]
+ , "m" : ["--bloo", "medium"]
+ , "p" : ["--pick"]
+ , "f" : ["--flag"]
+ }
+ // everything is optional.
+ // knownOpts and shorthands default to {}
+ // arg list defaults to process.argv
+ // slice defaults to 2
+ , parsed = nopt(knownOpts, shortHands, process.argv, 2)
+ console.log(parsed)
+
+This would give you support for any of the following:
+
+```bash
+$ node my-program.js --foo "blerp" --no-flag
+{ "foo" : "blerp", "flag" : false }
+
+$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag
+{ bar: 7, foo: "Mr. Hand", flag: true }
+
+$ node my-program.js --foo "blerp" -f -----p
+{ foo: "blerp", flag: true, pick: true }
+
+$ node my-program.js -fp --foofoo
+{ foo: "Mr. Foo", flag: true, pick: true }
+
+$ node my-program.js --foofoo -- -fp # -- stops the flag parsing.
+{ foo: "Mr. Foo", argv: { remain: ["-fp"] } }
+
+$ node my-program.js --blatzk 1000 -fp # unknown opts are ok.
+{ blatzk: 1000, flag: true, pick: true }
+
+$ node my-program.js --blatzk true -fp # but they need a value
+{ blatzk: true, flag: true, pick: true }
+
+$ node my-program.js --no-blatzk -fp # unless they start with "no-"
+{ blatzk: false, flag: true, pick: true }
+
+$ node my-program.js --baz b/a/z # known paths are resolved.
+{ baz: "/Users/isaacs/b/a/z" }
+
+# if Array is one of the types, then it can take many
+# values, and will always be an array. The other types provided
+# specify what types are allowed in the list.
+
+$ node my-program.js --many 1 --many null --many foo
+{ many: ["1", "null", "foo"] }
+
+$ node my-program.js --many foo
+{ many: ["foo"] }
+```
+
+Read the tests at the bottom of `lib/nopt.js` for more examples of
+what this puppy can do.
+
+## Types
+
+The following types are supported, and defined on `nopt.typeDefs`
+
+* String: A normal string. No parsing is done.
+* path: A file system path. Gets resolved against cwd if not absolute.
+* url: A url. If it doesn't parse, it isn't accepted.
+* Number: Must be numeric.
+* Date: Must parse as a date. If it does, and `Date` is one of the options,
+ then it will return a Date object, not a string.
+* Boolean: Must be either `true` or `false`. If an option is a boolean,
+ then it does not need a value, and its presence will imply `true` as
+ the value. To negate boolean flags, do `--no-whatever` or `--whatever
+ false`
+* NaN: Means that the option is strictly not allowed. Any value will
+ fail.
+* Stream: An object matching the "Stream" class in node. Valuable
+ for use when validating programmatically. (npm uses this to let you
+ supply any WriteStream on the `outfd` and `logfd` config options.)
+* Array: If `Array` is specified as one of the types, then the value
+ will be parsed as a list of options. This means that multiple values
+ can be specified, and that the value will always be an array.
+
+If a type is an array of values not on this list, then those are
+considered valid values. For instance, in the example above, the
+`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`,
+and any other value will be rejected.
+
+When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be
+interpreted as their JavaScript equivalents, and numeric values will be
+interpreted as a number.
+
+You can also mix types and values, or multiple types, in a list. For
+instance `{ blah: [Number, null] }` would allow a value to be set to
+either a Number or null.
+
+To define a new type, add it to `nopt.typeDefs`. Each item in that
+hash is an object with a `type` member and a `validate` method. The
+`type` member is an object that matches what goes in the type list. The
+`validate` method is a function that gets called with `validate(data,
+key, val)`. Validate methods should assign `data[key]` to the valid
+value of `val` if it can be handled properly, or return boolean
+`false` if it cannot.
+
+You can also call `nopt.clean(data, types, typeDefs)` to clean up a
+config object and remove its invalid properties.
+
+## Error Handling
+
+By default, nopt outputs a warning to standard error when invalid
+options are found. You can change this behavior by assigning a method
+to `nopt.invalidHandler`. This method will be called with
+the offending `nopt.invalidHandler(key, val, types)`.
+
+If no `nopt.invalidHandler` is assigned, then it will console.error
+its whining. If it is assigned to boolean `false` then the warning is
+suppressed.
+
+## Abbreviations
+
+Yes, they are supported. If you define options like this:
+
+```javascript
+{ "foolhardyelephants" : Boolean
+, "pileofmonkeys" : Boolean }
+```
+
+Then this will work:
+
+```bash
+node program.js --foolhar --pil
+node program.js --no-f --pileofmon
+# etc.
+```
+
+## Shorthands
+
+Shorthands are a hash of shorter option names to a snippet of args that
+they expand to.
+
+If multiple one-character shorthands are all combined, and the
+combination does not unambiguously match any other option or shorthand,
+then they will be broken up into their constituent parts. For example:
+
+```json
+{ "s" : ["--loglevel", "silent"]
+, "g" : "--global"
+, "f" : "--force"
+, "p" : "--parseable"
+, "l" : "--long"
+}
+```
+
+```bash
+npm ls -sgflp
+# just like doing this:
+npm ls --loglevel silent --global --force --long --parseable
+```
+
+## The Rest of the args
+
+The config object returned by nopt is given a special member called
+`argv`, which is an object with the following fields:
+
+* `remain`: The remaining args after all the parsing has occurred.
+* `original`: The args as they originally appeared.
+* `cooked`: The args after flags and shorthands are expanded.
+
+## Slicing
+
+Node programs are called with more or less the exact argv as it appears
+in C land, after the v8 and node-specific options have been plucked off.
+As such, `argv[0]` is always `node` and `argv[1]` is always the
+JavaScript program being run.
+
+That's usually not very useful to you. So they're sliced off by
+default. If you want them, then you can pass in `0` as the last
+argument, or any other number that you'd like to slice off the start of
+the list.
diff --git a/deps/npm/node_modules/nopt/bin/nopt.js b/deps/npm/node_modules/nopt/bin/nopt.js
new file mode 100755
index 0000000000..df90c729af
--- /dev/null
+++ b/deps/npm/node_modules/nopt/bin/nopt.js
@@ -0,0 +1,44 @@
+#!/usr/bin/env node
+var nopt = require("../lib/nopt")
+ , types = { num: Number
+ , bool: Boolean
+ , help: Boolean
+ , list: Array
+ , "num-list": [Number, Array]
+ , "str-list": [String, Array]
+ , "bool-list": [Boolean, Array]
+ , str: String }
+ , shorthands = { s: [ "--str", "astring" ]
+ , b: [ "--bool" ]
+ , nb: [ "--no-bool" ]
+ , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ]
+ , "?": ["--help"]
+ , h: ["--help"]
+ , H: ["--help"]
+ , n: [ "--num", "125" ] }
+ , parsed = nopt( types
+ , shorthands
+ , process.argv
+ , 2 )
+
+console.log("parsed", parsed)
+
+if (parsed.help) {
+ console.log("")
+ console.log("nopt cli tester")
+ console.log("")
+ console.log("types")
+ console.log(Object.keys(types).map(function M (t) {
+ var type = types[t]
+ if (Array.isArray(type)) {
+ return [t, type.map(function (type) { return type.name })]
+ }
+ return [t, type && type.name]
+ }).reduce(function (s, i) {
+ s[i[0]] = i[1]
+ return s
+ }, {}))
+ console.log("")
+ console.log("shorthands")
+ console.log(shorthands)
+}
diff --git a/deps/npm/node_modules/nopt/examples/my-program.js b/deps/npm/node_modules/nopt/examples/my-program.js
new file mode 100755
index 0000000000..142447e18e
--- /dev/null
+++ b/deps/npm/node_modules/nopt/examples/my-program.js
@@ -0,0 +1,30 @@
+#!/usr/bin/env node
+
+//process.env.DEBUG_NOPT = 1
+
+// my-program.js
+var nopt = require("../lib/nopt")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , knownOpts = { "foo" : [String, null]
+ , "bar" : [Stream, Number]
+ , "baz" : path
+ , "bloo" : [ "big", "medium", "small" ]
+ , "flag" : Boolean
+ , "pick" : Boolean
+ }
+ , shortHands = { "foofoo" : ["--foo", "Mr. Foo"]
+ , "b7" : ["--bar", "7"]
+ , "m" : ["--bloo", "medium"]
+ , "p" : ["--pick"]
+ , "f" : ["--flag", "true"]
+ , "g" : ["--flag"]
+ , "s" : "--flag"
+ }
+ // everything is optional.
+ // knownOpts and shorthands default to {}
+ // arg list defaults to process.argv
+ // slice defaults to 2
+ , parsed = nopt(knownOpts, shortHands, process.argv, 2)
+
+console.log("parsed =\n"+ require("util").inspect(parsed))
diff --git a/deps/npm/node_modules/nopt/lib/nopt.js b/deps/npm/node_modules/nopt/lib/nopt.js
new file mode 100644
index 0000000000..ff802dafe3
--- /dev/null
+++ b/deps/npm/node_modules/nopt/lib/nopt.js
@@ -0,0 +1,552 @@
+// info about each config option.
+
+var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG
+ ? function () { console.error.apply(console, arguments) }
+ : function () {}
+
+var url = require("url")
+ , path = require("path")
+ , Stream = require("stream").Stream
+ , abbrev = require("abbrev")
+
+module.exports = exports = nopt
+exports.clean = clean
+
+exports.typeDefs =
+ { String : { type: String, validate: validateString }
+ , Boolean : { type: Boolean, validate: validateBoolean }
+ , url : { type: url, validate: validateUrl }
+ , Number : { type: Number, validate: validateNumber }
+ , path : { type: path, validate: validatePath }
+ , Stream : { type: Stream, validate: validateStream }
+ , Date : { type: Date, validate: validateDate }
+ }
+
+function nopt (types, shorthands, args, slice) {
+ args = args || process.argv
+ types = types || {}
+ shorthands = shorthands || {}
+ if (typeof slice !== "number") slice = 2
+
+ debug(types, shorthands, args, slice)
+
+ args = args.slice(slice)
+ var data = {}
+ , key
+ , remain = []
+ , cooked = args
+ , original = args.slice(0)
+
+ parse(args, data, remain, types, shorthands)
+ // now data is full
+ clean(data, types, exports.typeDefs)
+ data.argv = {remain:remain,cooked:cooked,original:original}
+ data.argv.toString = function () {
+ return this.original.map(JSON.stringify).join(" ")
+ }
+ return data
+}
+
+function clean (data, types, typeDefs) {
+ typeDefs = typeDefs || exports.typeDefs
+ var remove = {}
+ , typeDefault = [false, true, null, String, Number]
+
+ Object.keys(data).forEach(function (k) {
+ if (k === "argv") return
+ var val = data[k]
+ , isArray = Array.isArray(val)
+ , type = types[k]
+ if (!isArray) val = [val]
+ if (!type) type = typeDefault
+ if (type === Array) type = typeDefault.concat(Array)
+ if (!Array.isArray(type)) type = [type]
+
+ debug("val=%j", val)
+ debug("types=", type)
+ val = val.map(function (val) {
+ // if it's an unknown value, then parse false/true/null/numbers/dates
+ if (typeof val === "string") {
+ debug("string %j", val)
+ val = val.trim()
+ if ((val === "null" && ~type.indexOf(null))
+ || (val === "true" &&
+ (~type.indexOf(true) || ~type.indexOf(Boolean)))
+ || (val === "false" &&
+ (~type.indexOf(false) || ~type.indexOf(Boolean)))) {
+ val = JSON.parse(val)
+ debug("jsonable %j", val)
+ } else if (~type.indexOf(Number) && !isNaN(val)) {
+ debug("convert to number", val)
+ val = +val
+ } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) {
+ debug("convert to date", val)
+ val = new Date(val)
+ }
+ }
+
+ if (!types.hasOwnProperty(k)) {
+ return val
+ }
+
+ // allow `--no-blah` to set 'blah' to null if null is allowed
+ if (val === false && ~type.indexOf(null) &&
+ !(~type.indexOf(false) || ~type.indexOf(Boolean))) {
+ val = null
+ }
+
+ var d = {}
+ d[k] = val
+ debug("prevalidated val", d, val, types[k])
+ if (!validate(d, k, val, types[k], typeDefs)) {
+ if (exports.invalidHandler) {
+ exports.invalidHandler(k, val, types[k], data)
+ } else if (exports.invalidHandler !== false) {
+ debug("invalid: "+k+"="+val, types[k])
+ }
+ return remove
+ }
+ debug("validated val", d, val, types[k])
+ return d[k]
+ }).filter(function (val) { return val !== remove })
+
+ if (!val.length) delete data[k]
+ else if (isArray) {
+ debug(isArray, data[k], val)
+ data[k] = val
+ } else data[k] = val[0]
+
+ debug("k=%s val=%j", k, val, data[k])
+ })
+}
+
+function validateString (data, k, val) {
+ data[k] = String(val)
+}
+
+function validatePath (data, k, val) {
+ data[k] = path.resolve(String(val))
+ return true
+}
+
+function validateNumber (data, k, val) {
+ debug("validate Number %j %j %j", k, val, isNaN(val))
+ if (isNaN(val)) return false
+ data[k] = +val
+}
+
+function validateDate (data, k, val) {
+ debug("validate Date %j %j %j", k, val, Date.parse(val))
+ var s = Date.parse(val)
+ if (isNaN(s)) return false
+ data[k] = new Date(val)
+}
+
+function validateBoolean (data, k, val) {
+ if (val instanceof Boolean) val = val.valueOf()
+ else if (typeof val === "string") {
+ if (!isNaN(val)) val = !!(+val)
+ else if (val === "null" || val === "false") val = false
+ else val = true
+ } else val = !!val
+ data[k] = val
+}
+
+function validateUrl (data, k, val) {
+ val = url.parse(String(val))
+ if (!val.host) return false
+ data[k] = val.href
+}
+
+function validateStream (data, k, val) {
+ if (!(val instanceof Stream)) return false
+ data[k] = val
+}
+
+function validate (data, k, val, type, typeDefs) {
+ // arrays are lists of types.
+ if (Array.isArray(type)) {
+ for (var i = 0, l = type.length; i < l; i ++) {
+ if (type[i] === Array) continue
+ if (validate(data, k, val, type[i], typeDefs)) return true
+ }
+ delete data[k]
+ return false
+ }
+
+ // an array of anything?
+ if (type === Array) return true
+
+ // NaN is poisonous. Means that something is not allowed.
+ if (type !== type) {
+ debug("Poison NaN", k, val, type)
+ delete data[k]
+ return false
+ }
+
+ // explicit list of values
+ if (val === type) {
+ debug("Explicitly allowed %j", val)
+ // if (isArray) (data[k] = data[k] || []).push(val)
+ // else data[k] = val
+ data[k] = val
+ return true
+ }
+
+ // now go through the list of typeDefs, validate against each one.
+ var ok = false
+ , types = Object.keys(typeDefs)
+ for (var i = 0, l = types.length; i < l; i ++) {
+ debug("test type %j %j %j", k, val, types[i])
+ var t = typeDefs[types[i]]
+ if (t && type === t.type) {
+ var d = {}
+ ok = false !== t.validate(d, k, val)
+ val = d[k]
+ if (ok) {
+ // if (isArray) (data[k] = data[k] || []).push(val)
+ // else data[k] = val
+ data[k] = val
+ break
+ }
+ }
+ }
+ debug("OK? %j (%j %j %j)", ok, k, val, types[i])
+
+ if (!ok) delete data[k]
+ return ok
+}
+
+function parse (args, data, remain, types, shorthands) {
+ debug("parse", args, data, remain)
+
+ var key = null
+ , abbrevs = abbrev(Object.keys(types))
+ , shortAbbr = abbrev(Object.keys(shorthands))
+
+ for (var i = 0; i < args.length; i ++) {
+ var arg = args[i]
+ debug("arg", arg)
+
+ if (arg.match(/^-{2,}$/)) {
+ // done with keys.
+ // the rest are args.
+ remain.push.apply(remain, args.slice(i + 1))
+ args[i] = "--"
+ break
+ }
+ if (arg.charAt(0) === "-") {
+ if (arg.indexOf("=") !== -1) {
+ var v = arg.split("=")
+ arg = v.shift()
+ v = v.join("=")
+ args.splice.apply(args, [i, 1].concat([arg, v]))
+ }
+ // see if it's a shorthand
+ // if so, splice and back up to re-parse it.
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs)
+ debug("arg=%j shRes=%j", arg, shRes)
+ if (shRes) {
+ debug(arg, shRes)
+ args.splice.apply(args, [i, 1].concat(shRes))
+ if (arg !== shRes[0]) {
+ i --
+ continue
+ }
+ }
+ arg = arg.replace(/^-+/, "")
+ var no = false
+ while (arg.toLowerCase().indexOf("no-") === 0) {
+ no = !no
+ arg = arg.substr(3)
+ }
+
+ if (abbrevs[arg]) arg = abbrevs[arg]
+
+ var isArray = types[arg] === Array ||
+ Array.isArray(types[arg]) && types[arg].indexOf(Array) !== -1
+
+ var val
+ , la = args[i + 1]
+
+ var isBool = no ||
+ types[arg] === Boolean ||
+ Array.isArray(types[arg]) && types[arg].indexOf(Boolean) !== -1 ||
+ (la === "false" &&
+ (types[arg] === null ||
+ Array.isArray(types[arg]) && ~types[arg].indexOf(null)))
+
+ if (isBool) {
+ // just set and move along
+ val = !no
+ // however, also support --bool true or --bool false
+ if (la === "true" || la === "false") {
+ val = JSON.parse(la)
+ la = null
+ if (no) val = !val
+ i ++
+ }
+
+ // also support "foo":[Boolean, "bar"] and "--foo bar"
+ if (Array.isArray(types[arg]) && la) {
+ if (~types[arg].indexOf(la)) {
+ // an explicit type
+ val = la
+ i ++
+ } else if ( la === "null" && ~types[arg].indexOf(null) ) {
+ // null allowed
+ val = null
+ i ++
+ } else if ( !la.match(/^-{2,}[^-]/) &&
+ !isNaN(la) &&
+ ~types[arg].indexOf(Number) ) {
+ // number
+ val = +la
+ i ++
+ } else if ( !la.match(/^-[^-]/) && ~types[arg].indexOf(String) ) {
+ // string
+ val = la
+ i ++
+ }
+ }
+
+ if (isArray) (data[arg] = data[arg] || []).push(val)
+ else data[arg] = val
+
+ continue
+ }
+
+ if (la && la.match(/^-{2,}$/)) {
+ la = undefined
+ i --
+ }
+
+ val = la === undefined ? true : la
+ if (isArray) (data[arg] = data[arg] || []).push(val)
+ else data[arg] = val
+
+ i ++
+ continue
+ }
+ remain.push(arg)
+ }
+}
+
+function resolveShort (arg, shorthands, shortAbbr, abbrevs) {
+ // handle single-char shorthands glommed together, like
+ // npm ls -glp, but only if there is one dash, and only if
+ // all of the chars are single-char shorthands, and it's
+ // not a match to some other abbrev.
+ arg = arg.replace(/^-+/, '')
+ if (abbrevs[arg] && !shorthands[arg]) {
+ return null
+ }
+ if (shortAbbr[arg]) {
+ arg = shortAbbr[arg]
+ } else {
+ var singles = shorthands.___singles
+ if (!singles) {
+ singles = Object.keys(shorthands).filter(function (s) {
+ return s.length === 1
+ }).reduce(function (l,r) { l[r] = true ; return l }, {})
+ shorthands.___singles = singles
+ }
+ var chrs = arg.split("").filter(function (c) {
+ return singles[c]
+ })
+ if (chrs.join("") === arg) return chrs.map(function (c) {
+ return shorthands[c]
+ }).reduce(function (l, r) {
+ return l.concat(r)
+ }, [])
+ }
+
+ if (shorthands[arg] && !Array.isArray(shorthands[arg])) {
+ shorthands[arg] = shorthands[arg].split(/\s+/)
+ }
+ return shorthands[arg]
+}
+
+if (module === require.main) {
+var assert = require("assert")
+ , util = require("util")
+
+ , shorthands =
+ { s : ["--loglevel", "silent"]
+ , d : ["--loglevel", "info"]
+ , dd : ["--loglevel", "verbose"]
+ , ddd : ["--loglevel", "silly"]
+ , noreg : ["--no-registry"]
+ , reg : ["--registry"]
+ , "no-reg" : ["--no-registry"]
+ , silent : ["--loglevel", "silent"]
+ , verbose : ["--loglevel", "verbose"]
+ , h : ["--usage"]
+ , H : ["--usage"]
+ , "?" : ["--usage"]
+ , help : ["--usage"]
+ , v : ["--version"]
+ , f : ["--force"]
+ , desc : ["--description"]
+ , "no-desc" : ["--no-description"]
+ , "local" : ["--no-global"]
+ , l : ["--long"]
+ , p : ["--parseable"]
+ , porcelain : ["--parseable"]
+ , g : ["--global"]
+ }
+
+ , types =
+ { aoa: Array
+ , nullstream: [null, Stream]
+ , date: Date
+ , str: String
+ , browser : String
+ , cache : path
+ , color : ["always", Boolean]
+ , depth : Number
+ , description : Boolean
+ , dev : Boolean
+ , editor : path
+ , force : Boolean
+ , global : Boolean
+ , globalconfig : path
+ , group : [String, Number]
+ , gzipbin : String
+ , logfd : [Number, Stream]
+ , loglevel : ["silent","win","error","warn","info","verbose","silly"]
+ , long : Boolean
+ , "node-version" : [false, String]
+ , npaturl : url
+ , npat : Boolean
+ , "onload-script" : [false, String]
+ , outfd : [Number, Stream]
+ , parseable : Boolean
+ , pre: Boolean
+ , prefix: path
+ , proxy : url
+ , "rebuild-bundle" : Boolean
+ , registry : url
+ , searchopts : String
+ , searchexclude: [null, String]
+ , shell : path
+ , t: [Array, String]
+ , tag : String
+ , tar : String
+ , tmp : path
+ , "unsafe-perm" : Boolean
+ , usage : Boolean
+ , user : String
+ , username : String
+ , userconfig : path
+ , version : Boolean
+ , viewer: path
+ , _exit : Boolean
+ }
+
+; [["-v", {version:true}, []]
+ ,["---v", {version:true}, []]
+ ,["ls -s --no-reg connect -d",
+ {loglevel:"info",registry:null},["ls","connect"]]
+ ,["ls ---s foo",{loglevel:"silent"},["ls","foo"]]
+ ,["ls --registry blargle", {}, ["ls"]]
+ ,["--no-registry", {registry:null}, []]
+ ,["--no-color true", {color:false}, []]
+ ,["--no-color false", {color:true}, []]
+ ,["--no-color", {color:false}, []]
+ ,["--color false", {color:false}, []]
+ ,["--color --logfd 7", {logfd:7,color:true}, []]
+ ,["--color=true", {color:true}, []]
+ ,["--logfd=10", {logfd:10}, []]
+ ,["--tmp=/tmp -tar=gtar",{tmp:"/tmp",tar:"gtar"},[]]
+ ,["--tmp=tmp -tar=gtar",
+ {tmp:path.resolve(process.cwd(), "tmp"),tar:"gtar"},[]]
+ ,["--logfd x", {}, []]
+ ,["a -true -- -no-false", {true:true},["a","-no-false"]]
+ ,["a -no-false", {false:false},["a"]]
+ ,["a -no-no-true", {true:true}, ["a"]]
+ ,["a -no-no-no-false", {false:false}, ["a"]]
+ ,["---NO-no-No-no-no-no-nO-no-no"+
+ "-No-no-no-no-no-no-no-no-no"+
+ "-no-no-no-no-NO-NO-no-no-no-no-no-no"+
+ "-no-body-can-do-the-boogaloo-like-I-do"
+ ,{"body-can-do-the-boogaloo-like-I-do":false}, []]
+ ,["we are -no-strangers-to-love "+
+ "--you-know the-rules --and so-do-i "+
+ "---im-thinking-of=a-full-commitment "+
+ "--no-you-would-get-this-from-any-other-guy "+
+ "--no-gonna-give-you-up "+
+ "-no-gonna-let-you-down=true "+
+ "--no-no-gonna-run-around false "+
+ "--desert-you=false "+
+ "--make-you-cry false "+
+ "--no-tell-a-lie "+
+ "--no-no-and-hurt-you false"
+ ,{"strangers-to-love":false
+ ,"you-know":"the-rules"
+ ,"and":"so-do-i"
+ ,"you-would-get-this-from-any-other-guy":false
+ ,"gonna-give-you-up":false
+ ,"gonna-let-you-down":false
+ ,"gonna-run-around":false
+ ,"desert-you":false
+ ,"make-you-cry":false
+ ,"tell-a-lie":false
+ ,"and-hurt-you":false
+ },["we", "are"]]
+ ,["-t one -t two -t three"
+ ,{t: ["one", "two", "three"]}
+ ,[]]
+ ,["-t one -t null -t three four five null"
+ ,{t: ["one", "null", "three"]}
+ ,["four", "five", "null"]]
+ ,["-t foo"
+ ,{t:["foo"]}
+ ,[]]
+ ,["--no-t"
+ ,{t:["false"]}
+ ,[]]
+ ,["-no-no-t"
+ ,{t:["true"]}
+ ,[]]
+ ,["-aoa one -aoa null -aoa 100"
+ ,{aoa:["one", null, 100]}
+ ,[]]
+ ,["-str 100"
+ ,{str:"100"}
+ ,[]]
+ ,["--color always"
+ ,{color:"always"}
+ ,[]]
+ ,["--no-nullstream"
+ ,{nullstream:null}
+ ,[]]
+ ,["--nullstream false"
+ ,{nullstream:null}
+ ,[]]
+ ,["--notadate 2011-01-25"
+ ,{notadate: "2011-01-25"}
+ ,[]]
+ ,["--date 2011-01-25"
+ ,{date: new Date("2011-01-25")}
+ ,[]]
+ ].forEach(function (test) {
+ var argv = test[0].split(/\s+/)
+ , opts = test[1]
+ , rem = test[2]
+ , actual = nopt(types, shorthands, argv, 0)
+ , parsed = actual.argv
+ delete actual.argv
+ console.log(util.inspect(actual, false, 2, true), parsed.remain)
+ for (var i in opts) {
+ var e = JSON.stringify(opts[i])
+ , a = JSON.stringify(actual[i] === undefined ? null : actual[i])
+ if (e && typeof e === "object") {
+ assert.deepEqual(e, a)
+ } else {
+ assert.equal(e, a)
+ }
+ }
+ assert.deepEqual(rem, parsed.remain)
+ })
+}
diff --git a/deps/npm/node_modules/nopt/package.json b/deps/npm/node_modules/nopt/package.json
new file mode 100644
index 0000000000..d1118e3999
--- /dev/null
+++ b/deps/npm/node_modules/nopt/package.json
@@ -0,0 +1,12 @@
+{ "name" : "nopt"
+, "version" : "1.0.10"
+, "description" : "Option parsing for Node, supporting types, shorthands, etc. Used by npm."
+, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
+, "main" : "lib/nopt.js"
+, "scripts" : { "test" : "node lib/nopt.js" }
+, "repository" : "http://github.com/isaacs/nopt"
+, "bin" : "./bin/nopt.js"
+, "license" :
+ { "type" : "MIT"
+ , "url" : "https://github.com/isaacs/nopt/raw/master/LICENSE" }
+, "dependencies" : { "abbrev" : "1" }}
diff --git a/deps/npm/node_modules/proto-list/LICENSE b/deps/npm/node_modules/proto-list/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/proto-list/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/proto-list/README.md b/deps/npm/node_modules/proto-list/README.md
new file mode 100644
index 0000000000..43cfa35893
--- /dev/null
+++ b/deps/npm/node_modules/proto-list/README.md
@@ -0,0 +1,3 @@
+A list of objects, bound by their prototype chain.
+
+Used in npm's config stuff.
diff --git a/deps/npm/node_modules/proto-list/package.json b/deps/npm/node_modules/proto-list/package.json
new file mode 100644
index 0000000000..5cab34befe
--- /dev/null
+++ b/deps/npm/node_modules/proto-list/package.json
@@ -0,0 +1,9 @@
+{ "name" : "proto-list"
+, "version" : "1.0.0"
+, "description" : "A utility for managing a prototype chain"
+, "main" : "./proto-list.js"
+, "author" : "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
+, "scripts" : { "test" : "node proto-list.js" }
+, "repository": { "type": "git", "url": "https://github.com/isaacs/proto-list" }
+, "license": { "type": "MIT", "url": "https://github.com/isaacs/proto-list/blob/master/LICENSE" }
+, "devDependencies" : { "tap" : "0" } }
diff --git a/deps/npm/node_modules/proto-list/proto-list.js b/deps/npm/node_modules/proto-list/proto-list.js
new file mode 100644
index 0000000000..759d827382
--- /dev/null
+++ b/deps/npm/node_modules/proto-list/proto-list.js
@@ -0,0 +1,94 @@
+
+module.exports = ProtoList
+
+function ProtoList () { this.list = [] }
+ProtoList.prototype =
+ { get length () { return this.list.length }
+ , get keys () {
+ var k = []
+ for (var i in this.list[0]) k.push(i)
+ return k
+ }
+ , get snapshot () {
+ var o = {}
+ this.keys.forEach(function (k) { o[k] = this.get(k) }, this)
+ return o
+ }
+ , push : function (obj) {
+ if (typeof obj !== "object") obj = {valueOf:obj}
+ if (this.list.length >= 1) {
+ this.list[this.list.length - 1].__proto__ = obj
+ }
+ obj.__proto__ = Object.prototype
+ return this.list.push(obj)
+ }
+ , pop : function () {
+ if (this.list.length >= 2) {
+ this.list[this.list.length - 2].__proto__ = Object.prototype
+ }
+ return this.list.pop()
+ }
+ , unshift : function (obj) {
+ obj.__proto__ = this.list[0] || Object.prototype
+ return this.list.unshift(obj)
+ }
+ , shift : function () {
+ if (this.list.length >= 1) {
+ this.list[0].__proto__ = Object.prototype
+ }
+ return this.list.shift()
+ }
+ , get : function (key) {
+ return this.list[0][key]
+ }
+ , set : function (key, val, save) {
+ if (!this.length) this.push({})
+ if (save && this.list[0].hasOwnProperty(key)) this.push({})
+ return this.list[0][key] = val
+ }
+ , forEach : function (fn, thisp) {
+ for (var key in this.list[0]) fn.call(thisp, key, this.list[0][key])
+ }
+ , slice : function () {
+ return this.list.slice.apply(this.list, arguments)
+ }
+ , splice : function () {
+ return this.list.splice.apply(this.list, arguments)
+ }
+ }
+
+if (module === require.main) {
+
+var tap = require("tap")
+ , test = tap.test
+
+tap.plan(1)
+
+tap.test("protoList tests", function (t) {
+ var p = new ProtoList
+ p.push({foo:"bar"})
+ p.push({})
+ p.set("foo", "baz")
+ t.equal(p.get("foo"), "baz")
+
+ var p = new ProtoList
+ p.push({foo:"bar"})
+ p.set("foo", "baz")
+ t.equal(p.get("foo"), "baz")
+ t.equal(p.length, 1)
+ p.pop()
+ t.equal(p.length, 0)
+ p.set("foo", "asdf")
+ t.equal(p.length, 1)
+ t.equal(p.get("foo"), "asdf")
+ p.push({bar:"baz"})
+ t.equal(p.length, 2)
+ t.equal(p.get("foo"), "asdf")
+ p.shift()
+ t.equal(p.length, 1)
+ t.equal(p.get("foo"), undefined)
+ t.end()
+})
+
+
+}
diff --git a/deps/npm/node_modules/request/LICENSE b/deps/npm/node_modules/request/LICENSE
new file mode 100644
index 0000000000..a4a9aee0c2
--- /dev/null
+++ b/deps/npm/node_modules/request/LICENSE
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS \ No newline at end of file
diff --git a/deps/npm/node_modules/request/README.md b/deps/npm/node_modules/request/README.md
new file mode 100644
index 0000000000..315bdbaf4a
--- /dev/null
+++ b/deps/npm/node_modules/request/README.md
@@ -0,0 +1,193 @@
+# Request -- Simplified HTTP request method
+
+## Install
+
+<pre>
+ npm install request
+</pre>
+
+Or from source:
+
+<pre>
+ git clone git://github.com/mikeal/request.git
+ cd request
+ npm link
+</pre>
+
+## Super simple to use
+
+Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
+
+```javascript
+var request = require('request');
+request('http://www.google.com', function (error, response, body) {
+ if (!error && response.statusCode == 200) {
+ console.log(body) // Print the google web page.
+ }
+})
+```
+
+## Streaming
+
+You can stream any response to a file stream.
+
+```javascript
+request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
+```
+
+You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types, in this case `application/json`, and use the proper content-type in the PUT request if one is not already provided in the headers.
+
+```javascript
+fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))
+```
+
+Request can also pipe to itself. When doing so the content-type and content-length will be preserved in the PUT headers.
+
+```javascript
+request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))
+```
+
+Now let's get fancy.
+
+```javascript
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ if (req.method === 'PUT') {
+ req.pipe(request.put('http://mysite.com/doodle.png'))
+ } else if (req.method === 'GET' || req.method === 'HEAD') {
+ request.get('http://mysite.com/doodle.png').pipe(resp)
+ }
+ }
+})
+```
+
+You can also pipe() from a http.ServerRequest instance and to a http.ServerResponse instance. The HTTP method and headers will be sent as well as the entity-body data. Which means that, if you don't really care about security, you can do:
+
+```javascript
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ var x = request('http://mysite.com/doodle.png')
+ req.pipe(x)
+ x.pipe(resp)
+ }
+})
+```
+
+And since pipe() returns the destination stream in node 0.5.x you can do one line proxying :)
+
+```javascript
+req.pipe(request('http://mysite.com/doodle.png')).pipe(resp)
+```
+
+Also, none of this new functionality conflicts with request's previous features, it just expands them.
+
+```javascript
+var r = request.defaults({'proxy':'http://localproxy.com'})
+
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ r.get('http://google.com/doodle.png').pipe(resp)
+ }
+})
+```
+
+You can still use intermediate proxies, the requests will still follow HTTP redirects, etc.
+
+### request(options, callback)
+
+The first argument can be either a url or an options object. The only required option is uri, all others are optional.
+
+* `uri` || `url` - fully qualified uri or a parsed url object from url.parse()
+* `method` - http method, defaults to GET
+* `headers` - http headers, defaults to {}
+* `body` - entity body for POST and PUT requests. Must be buffer or string.
+* `json` - sets `body` to a JSON representation of the value and adds `Content-type: application/json` header.
+* `multipart` - (experimental) array of objects which contains their own headers and `body` attribute. Sends `multipart/related` request. See example below.
+* `followRedirect` - follow HTTP 3xx responses as redirects. defaults to true.
+* `maxRedirects` - the maximum number of redirects to follow, defaults to 10.
+* `onResponse` - If true the callback will be fired on the "response" event instead of "end". If a function it will be called on "response" and not affect the regular semantics of the main callback on "end".
+* `encoding` - Encoding to be used on response.setEncoding when buffering the response data.
+* `pool` - A hash object containing the agents for these requests. If omitted this request will use the global pool which is set to node's default maxSockets.
+* `pool.maxSockets` - Integer containing the maximum amount of sockets in the pool.
+* `timeout` - Integer containing the number of milliseconds to wait for a request to respond before aborting the request
+* `proxy` - An HTTP proxy to be used. Support proxy Auth with Basic Auth the same way it's supported with the `url` parameter by embedding the auth info in the uri.
+* `strictSSL` - Set to `true` to require that SSL certificates be valid. Note: to use your own certificate authority, you need to specify an agent that was created with that ca as an option.
+
+
+The callback argument gets 3 arguments. The first is an error when applicable (usually from the http.Client option not the http.ClientRequest object). The second is an http.ClientResponse object. The third is the response body buffer.
+
+## Convenience methods
+
+There are also shorthand methods for different HTTP METHODs and some other conveniences.
+
+### request.defaults(options)
+
+This method returns a wrapper around the normal request API that defaults to whatever options you pass in to it.
+
+### request.put
+
+Same as request() but defaults to `method: "PUT"`.
+
+```javascript
+request.put(url)
+```
+
+### request.post
+
+Same as request() but defaults to `method: "POST"`.
+
+```javascript
+request.post(url)
+```
+
+### request.head
+
+Same as request() but defaults to `method: "HEAD"`.
+
+```javascript
+request.head(url)
+```
+
+### request.del
+
+Same as request() but defaults to `method: "DELETE"`.
+
+```javascript
+request.del(url)
+```
+
+### request.get
+
+Alias to normal request method for uniformity.
+
+```javascript
+request.get(url)
+```
+
+
+## Examples:
+
+```javascript
+ var request = require('request')
+ , rand = Math.floor(Math.random()*100000000).toString()
+ ;
+ request(
+ { method: 'PUT'
+ , uri: 'http://mikeal.couchone.com/testjs/' + rand
+ , multipart:
+ [ { 'content-type': 'application/json'
+ , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
+ }
+ , { body: 'I am an attachment' }
+ ]
+ }
+ , function (error, response, body) {
+ if(response.statusCode == 201){
+ console.log('document saved as: http://mikeal.couchone.com/testjs/'+ rand)
+ } else {
+ console.log('error: '+ response.statusCode)
+ console.log(body)
+ }
+ }
+ )
+```
diff --git a/deps/npm/node_modules/request/main.js b/deps/npm/node_modules/request/main.js
new file mode 100644
index 0000000000..e328e9b8b6
--- /dev/null
+++ b/deps/npm/node_modules/request/main.js
@@ -0,0 +1,506 @@
+// Copyright 2010-2011 Mikeal Rogers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+var http = require('http')
+ , https = false
+ , tls = false
+ , url = require('url')
+ , util = require('util')
+ , stream = require('stream')
+ , qs = require('querystring')
+ , mimetypes = require('./mimetypes')
+ ;
+
+try {
+ https = require('https')
+} catch (e) {}
+
+try {
+ tls = require('tls')
+} catch (e) {}
+
+function toBase64 (str) {
+ return (new Buffer(str || "", "ascii")).toString("base64")
+}
+
+// Hacky fix for pre-0.4.4 https
+if (https && !https.Agent) {
+ https.Agent = function (options) {
+ http.Agent.call(this, options)
+ }
+ util.inherits(https.Agent, http.Agent)
+ https.Agent.prototype._getConnection = function(host, port, cb) {
+ var s = tls.connect(port, host, this.options, function() {
+ // do other checks here?
+ if (cb) cb()
+ })
+ return s
+ }
+}
+
+function isReadStream (rs) {
+ if (rs.readable && rs.path && rs.mode) {
+ return true
+ }
+}
+
+function copy (obj) {
+ var o = {}
+ for (var i in obj) o[i] = obj[i]
+ return o
+}
+
+var isUrl = /^https?:/
+
+var globalPool = {}
+
+function Request (options) {
+ stream.Stream.call(this)
+ this.readable = true
+ this.writable = true
+
+ if (typeof options === 'string') {
+ options = {uri:options}
+ }
+
+ for (var i in options) {
+ this[i] = options[i]
+ }
+ if (!this.pool) this.pool = globalPool
+ this.dests = []
+ this.__isRequestRequest = true
+}
+util.inherits(Request, stream.Stream)
+Request.prototype.getAgent = function (host, port) {
+ if (!this.pool[host+':'+port]) {
+ this.pool[host+':'+port] = new this.httpModule.Agent({host:host, port:port})
+ }
+ return this.pool[host+':'+port]
+}
+Request.prototype.request = function () {
+ var self = this
+
+ // Protect against double callback
+ if (!self._callback && self.callback) {
+ self._callback = self.callback
+ self.callback = function () {
+ if (self._callbackCalled) return // Print a warning maybe?
+ self._callback.apply(self, arguments)
+ self._callbackCalled = true
+ }
+ }
+
+ if (self.url) {
+ // People use this property instead all the time so why not just support it.
+ self.uri = self.url
+ delete self.url
+ }
+
+ if (!self.uri) {
+ throw new Error("options.uri is a required argument")
+ } else {
+ if (typeof self.uri == "string") self.uri = url.parse(self.uri)
+ }
+ if (self.proxy) {
+ if (typeof self.proxy == 'string') self.proxy = url.parse(self.proxy)
+ }
+
+ self._redirectsFollowed = self._redirectsFollowed || 0
+ self.maxRedirects = (self.maxRedirects !== undefined) ? self.maxRedirects : 10
+ self.followRedirect = (self.followRedirect !== undefined) ? self.followRedirect : true
+ if (self.followRedirect)
+ self.redirects = self.redirects || []
+
+ self.headers = self.headers ? copy(self.headers) : {}
+
+ var setHost = false
+ if (!self.headers.host) {
+ self.headers.host = self.uri.hostname
+ if (self.uri.port) {
+ if ( !(self.uri.port === 80 && self.uri.protocol === 'http:') &&
+ !(self.uri.port === 443 && self.uri.protocol === 'https:') )
+ self.headers.host += (':'+self.uri.port)
+ }
+ setHost = true
+ }
+
+ if (!self.uri.pathname) {self.uri.pathname = '/'}
+ if (!self.uri.port) {
+ if (self.uri.protocol == 'http:') {self.uri.port = 80}
+ else if (self.uri.protocol == 'https:') {self.uri.port = 443}
+ }
+
+ if (self.proxy) {
+ self.port = self.proxy.port
+ self.host = self.proxy.hostname
+ } else {
+ self.port = self.uri.port
+ self.host = self.uri.hostname
+ }
+
+ if (self.onResponse === true) {
+ self.onResponse = self.callback
+ delete self.callback
+ }
+
+ var clientErrorHandler = function (error) {
+ if (setHost) delete self.headers.host
+ if (self.timeout && self.timeoutTimer) clearTimeout(self.timeoutTimer)
+ self.emit('error', error)
+ }
+ if (self.onResponse) self.on('error', function (e) {self.onResponse(e)})
+ if (self.callback) self.on('error', function (e) {self.callback(e)})
+
+
+ if (self.uri.auth && !self.headers.authorization) {
+ self.headers.authorization = "Basic " + toBase64(self.uri.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
+ }
+ if (self.proxy && self.proxy.auth && !self.headers['proxy-authorization']) {
+ self.headers['proxy-authorization'] = "Basic " + toBase64(self.proxy.auth.split(':').map(function(item){ return qs.unescape(item)}).join(':'))
+ }
+
+ if (self.uri.path) {
+ self.path = self.uri.path
+ } else {
+ self.path = self.uri.pathname + (self.uri.search || "")
+ }
+
+ if (self.path.length === 0) self.path = '/'
+
+ if (self.proxy) self.path = (self.uri.protocol + '//' + self.uri.host + self.path)
+
+ if (self.json) {
+ self.headers['content-type'] = 'application/json'
+ if (typeof self.json === 'boolean') {
+ if (typeof self.body === 'object') self.body = JSON.stringify(self.body)
+ } else {
+ self.body = JSON.stringify(self.json)
+ }
+
+ } else if (self.multipart) {
+ self.body = ''
+ self.headers['content-type'] = 'multipart/related;boundary="frontier"'
+ if (!self.multipart.forEach) throw new Error('Argument error, options.multipart.')
+
+ self.multipart.forEach(function (part) {
+ var body = part.body
+ if(!body) throw Error('Body attribute missing in multipart.')
+ delete part.body
+ self.body += '--frontier\r\n'
+ Object.keys(part).forEach(function(key){
+ self.body += key + ': ' + part[key] + '\r\n'
+ })
+ self.body += '\r\n' + body + '\r\n'
+ })
+ self.body += '--frontier--'
+ }
+
+ if (self.body) {
+ if (!Buffer.isBuffer(self.body)) {
+ self.body = new Buffer(self.body)
+ }
+ if (self.body.length) {
+ self.headers['content-length'] = self.body.length
+ } else {
+ throw new Error('Argument error, options.body.')
+ }
+ }
+
+ self.httpModule =
+ {"http:":http, "https:":https}[self.proxy ? self.proxy.protocol : self.uri.protocol]
+
+ if (!self.httpModule) throw new Error("Invalid protocol")
+
+ if (self.pool === false) {
+ self.agent = false
+ } else {
+ if (self.maxSockets) {
+ // Don't use our pooling if node has the refactored client
+ self.agent = self.httpModule.globalAgent || self.getAgent(self.host, self.port)
+ self.agent.maxSockets = self.maxSockets
+ }
+ if (self.pool.maxSockets) {
+ // Don't use our pooling if node has the refactored client
+ self.agent = self.httpModule.globalAgent || self.getAgent(self.host, self.port)
+ self.agent.maxSockets = self.pool.maxSockets
+ }
+ }
+
+ self.start = function () {
+ self._started = true
+ self.method = self.method || 'GET'
+
+ self.req = self.httpModule.request(self, function (response) {
+ self.response = response
+ response.request = self
+
+ if (self.httpModule === https &&
+ self.strictSSL &&
+ !response.client.authorized) {
+ var sslErr = response.client.authorizationError
+ self.emit('error', new Error('SSL Error: '+ sslErr))
+ return
+ }
+
+ if (setHost) delete self.headers.host
+ if (self.timeout && self.timeoutTimer) clearTimeout(self.timeoutTimer)
+
+ if (response.statusCode >= 300 &&
+ response.statusCode < 400 &&
+ self.followRedirect &&
+ self.method !== 'PUT' &&
+ self.method !== 'POST' &&
+ response.headers.location) {
+ if (self._redirectsFollowed >= self.maxRedirects) {
+ self.emit('error', new Error("Exceeded maxRedirects. Probably stuck in a redirect loop."))
+ return
+ }
+ self._redirectsFollowed += 1
+
+ if (!isUrl.test(response.headers.location)) {
+ response.headers.location = url.resolve(self.uri.href, response.headers.location)
+ }
+ self.uri = response.headers.location
+ self.redirects.push( { statusCode : response.statusCode,
+ redirectUri: response.headers.location })
+ delete self.req
+ delete self.agent
+ delete self._started
+ if (self.headers) {
+ delete self.headers.host
+ }
+ request(self, self.callback)
+ return // Ignore the rest of the response
+ } else {
+ self._redirectsFollowed = self._redirectsFollowed || 0
+ // Be a good stream and emit end when the response is finished.
+ // Hack to emit end on close because of a core bug that never fires end
+ response.on('close', function () {
+ if (!self._ended) self.response.emit('end')
+ })
+
+ if (self.encoding) {
+ if (self.dests.length !== 0) {
+ console.error("Ingoring encoding parameter as this stream is being piped to another stream which makes the encoding option invalid.")
+ } else {
+ response.setEncoding(self.encoding)
+ }
+ }
+
+ self.pipeDest = function (dest) {
+ if (dest.headers) {
+ dest.headers['content-type'] = response.headers['content-type']
+ if (response.headers['content-length']) {
+ dest.headers['content-length'] = response.headers['content-length']
+ }
+ }
+ if (dest.setHeader) {
+ for (var i in response.headers) {
+ dest.setHeader(i, response.headers[i])
+ }
+ dest.statusCode = response.statusCode
+ }
+ if (self.pipefilter) self.pipefilter(response, dest)
+ }
+
+ self.dests.forEach(function (dest) {
+ self.pipeDest(dest)
+ })
+
+ response.on("data", function (chunk) {
+ self._destdata = true
+ self.emit("data", chunk)
+ })
+ response.on("end", function (chunk) {
+ self._ended = true
+ self.emit("end", chunk)
+ })
+ response.on("close", function () {self.emit("close")})
+
+ self.emit('response', response)
+
+ if (self.onResponse) {
+ self.onResponse(null, response)
+ }
+ if (self.callback) {
+ var buffer = []
+ var bodyLen = 0
+ self.on("data", function (chunk) {
+ buffer.push(chunk)
+ bodyLen += chunk.length
+ })
+ self.on("end", function () {
+ if (buffer.length && Buffer.isBuffer(buffer[0])) {
+ var body = new Buffer(bodyLen)
+ var i = 0
+ buffer.forEach(function (chunk) {
+ chunk.copy(body, i, 0, chunk.length)
+ i += chunk.length
+ })
+ response.body = body.toString()
+ } else if (buffer.length) {
+ response.body = buffer.join('')
+ }
+
+ if (self.json) {
+ try {
+ response.body = JSON.parse(response.body)
+ } catch (e) {}
+ }
+ self.callback(null, response, response.body)
+ })
+ }
+ }
+ })
+
+ if (self.timeout) {
+ self.timeoutTimer = setTimeout(function() {
+ self.req.abort()
+ var e = new Error("ETIMEDOUT")
+ e.code = "ETIMEDOUT"
+ self.emit("error", e)
+ }, self.timeout)
+ }
+
+ self.req.on('error', clientErrorHandler)
+ }
+
+ self.once('pipe', function (src) {
+ if (self.ntick) throw new Error("You cannot pipe to this stream after the first nextTick() after creation of the request stream.")
+ self.src = src
+ if (isReadStream(src)) {
+ if (!self.headers['content-type'] && !self.headers['Content-Type'])
+ self.headers['content-type'] = mimetypes.lookup(src.path.slice(src.path.lastIndexOf('.')+1))
+ } else {
+ if (src.headers) {
+ for (var i in src.headers) {
+ if (!self.headers[i]) {
+ self.headers[i] = src.headers[i]
+ }
+ }
+ }
+ if (src.method && !self.method) {
+ self.method = src.method
+ }
+ }
+
+ self.on('pipe', function () {
+ console.error("You have already piped to this stream. Pipeing twice is likely to break the request.")
+ })
+ })
+
+ process.nextTick(function () {
+ if (self.body) {
+ self.write(self.body)
+ self.end()
+ } else if (self.requestBodyStream) {
+ console.warn("options.requestBodyStream is deprecated, please pass the request object to stream.pipe.")
+ self.requestBodyStream.pipe(self)
+ } else if (!self.src) {
+ self.headers['content-length'] = 0
+ self.end()
+ }
+ self.ntick = true
+ })
+}
+Request.prototype.pipe = function (dest) {
+ if (this.response) {
+ if (this._destdata) {
+ throw new Error("You cannot pipe after data has been emitted from the response.")
+ } else if (this._ended) {
+ throw new Error("You cannot pipe after the response has been ended.")
+ } else {
+ stream.Stream.prototype.pipe.call(this, dest)
+ this.pipeDest(dest)
+ return dest
+ }
+ } else {
+ this.dests.push(dest)
+ stream.Stream.prototype.pipe.call(this, dest)
+ return dest
+ }
+}
+Request.prototype.write = function () {
+ if (!this._started) this.start()
+ if (!this.req) throw new Error("This request has been piped before http.request() was called.")
+ this.req.write.apply(this.req, arguments)
+}
+Request.prototype.end = function () {
+ if (!this._started) this.start()
+ if (!this.req) throw new Error("This request has been piped before http.request() was called.")
+ this.req.end.apply(this.req, arguments)
+}
+Request.prototype.pause = function () {
+ if (!this.response) throw new Error("This request has been piped before http.request() was called.")
+ this.response.pause.apply(this.response, arguments)
+}
+Request.prototype.resume = function () {
+ if (!this.response) throw new Error("This request has been piped before http.request() was called.")
+ this.response.resume.apply(this.response, arguments)
+}
+
+function request (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ if (callback) options.callback = callback
+ var r = new Request(options)
+ r.request()
+ return r
+}
+
+module.exports = request
+
+request.defaults = function (options) {
+ var def = function (method) {
+ var d = function (opts, callback) {
+ if (typeof opts === 'string') opts = {uri:opts}
+ for (var i in options) {
+ if (opts[i] === undefined) opts[i] = options[i]
+ }
+ return method(opts, callback)
+ }
+ return d
+ }
+ var de = def(request)
+ de.get = def(request.get)
+ de.post = def(request.post)
+ de.put = def(request.put)
+ de.head = def(request.head)
+ de.del = def(request.del)
+ return de
+}
+
+request.get = request
+request.post = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'POST'
+ return request(options, callback)
+}
+request.put = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'PUT'
+ return request(options, callback)
+}
+request.head = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'HEAD'
+ if (options.body || options.requestBodyStream || options.json || options.multipart) {
+ throw new Error("HTTP HEAD requests MUST NOT include a request body.")
+ }
+ return request(options, callback)
+}
+request.del = function (options, callback) {
+ if (typeof options === 'string') options = {uri:options}
+ options.method = 'DELETE'
+ return request(options, callback)
+}
diff --git a/deps/npm/node_modules/request/mimetypes.js b/deps/npm/node_modules/request/mimetypes.js
new file mode 100644
index 0000000000..86910064c9
--- /dev/null
+++ b/deps/npm/node_modules/request/mimetypes.js
@@ -0,0 +1,146 @@
+// from http://github.com/felixge/node-paperboy
+exports.types = {
+ "aiff":"audio/x-aiff",
+ "arj":"application/x-arj-compressed",
+ "asf":"video/x-ms-asf",
+ "asx":"video/x-ms-asx",
+ "au":"audio/ulaw",
+ "avi":"video/x-msvideo",
+ "bcpio":"application/x-bcpio",
+ "ccad":"application/clariscad",
+ "cod":"application/vnd.rim.cod",
+ "com":"application/x-msdos-program",
+ "cpio":"application/x-cpio",
+ "cpt":"application/mac-compactpro",
+ "csh":"application/x-csh",
+ "css":"text/css",
+ "deb":"application/x-debian-package",
+ "dl":"video/dl",
+ "doc":"application/msword",
+ "drw":"application/drafting",
+ "dvi":"application/x-dvi",
+ "dwg":"application/acad",
+ "dxf":"application/dxf",
+ "dxr":"application/x-director",
+ "etx":"text/x-setext",
+ "ez":"application/andrew-inset",
+ "fli":"video/x-fli",
+ "flv":"video/x-flv",
+ "gif":"image/gif",
+ "gl":"video/gl",
+ "gtar":"application/x-gtar",
+ "gz":"application/x-gzip",
+ "hdf":"application/x-hdf",
+ "hqx":"application/mac-binhex40",
+ "html":"text/html",
+ "ice":"x-conference/x-cooltalk",
+ "ico":"image/x-icon",
+ "ief":"image/ief",
+ "igs":"model/iges",
+ "ips":"application/x-ipscript",
+ "ipx":"application/x-ipix",
+ "jad":"text/vnd.sun.j2me.app-descriptor",
+ "jar":"application/java-archive",
+ "jpeg":"image/jpeg",
+ "jpg":"image/jpeg",
+ "js":"text/javascript",
+ "json":"application/json",
+ "latex":"application/x-latex",
+ "lsp":"application/x-lisp",
+ "lzh":"application/octet-stream",
+ "m":"text/plain",
+ "m3u":"audio/x-mpegurl",
+ "man":"application/x-troff-man",
+ "me":"application/x-troff-me",
+ "midi":"audio/midi",
+ "mif":"application/x-mif",
+ "mime":"www/mime",
+ "movie":"video/x-sgi-movie",
+ "mustache":"text/plain",
+ "mp4":"video/mp4",
+ "mpg":"video/mpeg",
+ "mpga":"audio/mpeg",
+ "ms":"application/x-troff-ms",
+ "nc":"application/x-netcdf",
+ "oda":"application/oda",
+ "ogm":"application/ogg",
+ "pbm":"image/x-portable-bitmap",
+ "pdf":"application/pdf",
+ "pgm":"image/x-portable-graymap",
+ "pgn":"application/x-chess-pgn",
+ "pgp":"application/pgp",
+ "pm":"application/x-perl",
+ "png":"image/png",
+ "pnm":"image/x-portable-anymap",
+ "ppm":"image/x-portable-pixmap",
+ "ppz":"application/vnd.ms-powerpoint",
+ "pre":"application/x-freelance",
+ "prt":"application/pro_eng",
+ "ps":"application/postscript",
+ "qt":"video/quicktime",
+ "ra":"audio/x-realaudio",
+ "rar":"application/x-rar-compressed",
+ "ras":"image/x-cmu-raster",
+ "rgb":"image/x-rgb",
+ "rm":"audio/x-pn-realaudio",
+ "rpm":"audio/x-pn-realaudio-plugin",
+ "rtf":"text/rtf",
+ "rtx":"text/richtext",
+ "scm":"application/x-lotusscreencam",
+ "set":"application/set",
+ "sgml":"text/sgml",
+ "sh":"application/x-sh",
+ "shar":"application/x-shar",
+ "silo":"model/mesh",
+ "sit":"application/x-stuffit",
+ "skt":"application/x-koan",
+ "smil":"application/smil",
+ "snd":"audio/basic",
+ "sol":"application/solids",
+ "spl":"application/x-futuresplash",
+ "src":"application/x-wais-source",
+ "stl":"application/SLA",
+ "stp":"application/STEP",
+ "sv4cpio":"application/x-sv4cpio",
+ "sv4crc":"application/x-sv4crc",
+ "svg":"image/svg+xml",
+ "swf":"application/x-shockwave-flash",
+ "tar":"application/x-tar",
+ "tcl":"application/x-tcl",
+ "tex":"application/x-tex",
+ "texinfo":"application/x-texinfo",
+ "tgz":"application/x-tar-gz",
+ "tiff":"image/tiff",
+ "tr":"application/x-troff",
+ "tsi":"audio/TSP-audio",
+ "tsp":"application/dsptype",
+ "tsv":"text/tab-separated-values",
+ "unv":"application/i-deas",
+ "ustar":"application/x-ustar",
+ "vcd":"application/x-cdlink",
+ "vda":"application/vda",
+ "vivo":"video/vnd.vivo",
+ "vrm":"x-world/x-vrml",
+ "wav":"audio/x-wav",
+ "wax":"audio/x-ms-wax",
+ "wma":"audio/x-ms-wma",
+ "wmv":"video/x-ms-wmv",
+ "wmx":"video/x-ms-wmx",
+ "wrl":"model/vrml",
+ "wvx":"video/x-ms-wvx",
+ "xbm":"image/x-xbitmap",
+ "xlw":"application/vnd.ms-excel",
+ "xml":"text/xml",
+ "xpm":"image/x-xpixmap",
+ "xwd":"image/x-xwindowdump",
+ "xyz":"chemical/x-pdb",
+ "zip":"application/zip",
+};
+
+exports.lookup = function(ext, defaultType) {
+ defaultType = defaultType || 'application/octet-stream';
+
+ return (ext in exports.types)
+ ? exports.types[ext]
+ : defaultType;
+}; \ No newline at end of file
diff --git a/deps/npm/node_modules/request/package.json b/deps/npm/node_modules/request/package.json
new file mode 100644
index 0000000000..2d98aa2510
--- /dev/null
+++ b/deps/npm/node_modules/request/package.json
@@ -0,0 +1,15 @@
+{ "name" : "request"
+, "description" : "Simplified HTTP request client."
+, "tags" : ["http", "simple", "util", "utility"]
+, "version" : "2.1.1"
+, "author" : "Mikeal Rogers <mikeal.rogers@gmail.com>"
+, "repository" :
+ { "type" : "git"
+ , "url" : "http://github.com/mikeal/request.git"
+ }
+, "bugs" :
+ { "url" : "http://github.com/mikeal/request/issues" }
+, "engines" : ["node >= 0.3.6"]
+, "main" : "./main"
+, "scripts": { "test": "bash tests/run.sh" }
+}
diff --git a/deps/npm/node_modules/request/tests/googledoodle.png b/deps/npm/node_modules/request/tests/googledoodle.png
new file mode 100644
index 0000000000..f80c9c52d3
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/googledoodle.png
Binary files differ
diff --git a/deps/npm/node_modules/request/tests/run.sh b/deps/npm/node_modules/request/tests/run.sh
new file mode 100755
index 0000000000..57d0f64966
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/run.sh
@@ -0,0 +1,6 @@
+FAILS=0
+for i in tests/test-*.js; do
+ echo $i
+ node $i || let FAILS++
+done
+exit $FAILS
diff --git a/deps/npm/node_modules/request/tests/server.js b/deps/npm/node_modules/request/tests/server.js
new file mode 100644
index 0000000000..bad1e50305
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/server.js
@@ -0,0 +1,57 @@
+var http = require('http')
+ , events = require('events')
+ , stream = require('stream')
+ , assert = require('assert')
+ ;
+
+exports.createServer = function (port) {
+ port = port || 6767
+ var s = http.createServer(function (req, resp) {
+ s.emit(req.url, req, resp);
+ })
+ s.port = port
+ s.url = 'http://localhost:'+port
+ return s;
+}
+
+exports.createPostStream = function (text) {
+ var postStream = new stream.Stream();
+ postStream.writeable = true;
+ postStream.readable = true;
+ setTimeout(function () {postStream.emit('data', new Buffer(text)); postStream.emit('end')}, 0);
+ return postStream;
+}
+exports.createPostValidator = function (text) {
+ var l = function (req, resp) {
+ var r = '';
+ req.on('data', function (chunk) {r += chunk})
+ req.on('end', function () {
+ if (r !== text) console.log(r, text);
+ assert.equal(r, text)
+ resp.writeHead(200, {'content-type':'text/plain'})
+ resp.write('OK')
+ resp.end()
+ })
+ }
+ return l;
+}
+exports.createGetResponse = function (text, contentType) {
+ var l = function (req, resp) {
+ contentType = contentType || 'text/plain'
+ resp.writeHead(200, {'content-type':contentType})
+ resp.write(text)
+ resp.end()
+ }
+ return l;
+}
+exports.createChunkResponse = function (chunks, contentType) {
+ var l = function (req, resp) {
+ contentType = contentType || 'text/plain'
+ resp.writeHead(200, {'content-type':contentType})
+ chunks.forEach(function (chunk) {
+ resp.write(chunk)
+ })
+ resp.end()
+ }
+ return l;
+}
diff --git a/deps/npm/node_modules/request/tests/test-body.js b/deps/npm/node_modules/request/tests/test-body.js
new file mode 100644
index 0000000000..18ad5b9c6f
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/test-body.js
@@ -0,0 +1,90 @@
+var server = require('./server')
+ , events = require('events')
+ , stream = require('stream')
+ , assert = require('assert')
+ , request = require('../main.js')
+ ;
+
+var s = server.createServer();
+
+var tests =
+ { testGet :
+ { resp : server.createGetResponse("TESTING!")
+ , expectBody: "TESTING!"
+ }
+ , testGetChunkBreak :
+ { resp : server.createChunkResponse(
+ [ new Buffer([239])
+ , new Buffer([163])
+ , new Buffer([191])
+ , new Buffer([206])
+ , new Buffer([169])
+ , new Buffer([226])
+ , new Buffer([152])
+ , new Buffer([131])
+ ])
+ , expectBody: "Ω☃"
+ }
+ , testGetJSON :
+ { resp : server.createGetResponse('{"test":true}', 'application/json')
+ , json : true
+ , expectBody: {"test":true}
+ }
+ , testPutString :
+ { resp : server.createPostValidator("PUTTINGDATA")
+ , method : "PUT"
+ , body : "PUTTINGDATA"
+ }
+ , testPutBuffer :
+ { resp : server.createPostValidator("PUTTINGDATA")
+ , method : "PUT"
+ , body : new Buffer("PUTTINGDATA")
+ }
+ , testPutJSON :
+ { resp : server.createPostValidator(JSON.stringify({foo: 'bar'}))
+ , method: "PUT"
+ , json: {foo: 'bar'}
+ }
+ , testPutMultipart :
+ { resp: server.createPostValidator(
+ '--frontier\r\n' +
+ 'content-type: text/html\r\n' +
+ '\r\n' +
+ '<html><body>Oh hi.</body></html>' +
+ '\r\n--frontier\r\n\r\n' +
+ 'Oh hi.' +
+ '\r\n--frontier--'
+ )
+ , method: "PUT"
+ , multipart:
+ [ {'content-type': 'text/html', 'body': '<html><body>Oh hi.</body></html>'}
+ , {'body': 'Oh hi.'}
+ ]
+ }
+ }
+
+s.listen(s.port, function () {
+
+ var counter = 0
+
+ for (i in tests) {
+ (function () {
+ var test = tests[i]
+ s.on('/'+i, test.resp)
+ test.uri = s.url + '/' + i
+ request(test, function (err, resp, body) {
+ if (err) throw err
+ if (test.expectBody) {
+ assert.deepEqual(test.expectBody, body)
+ }
+ counter = counter - 1;
+ if (counter === 0) {
+ console.log(Object.keys(tests).length+" tests passed.")
+ s.close()
+ }
+ })
+ counter++
+ })()
+ }
+})
+
diff --git a/deps/npm/node_modules/request/tests/test-errors.js b/deps/npm/node_modules/request/tests/test-errors.js
new file mode 100644
index 0000000000..e5df87b56c
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/test-errors.js
@@ -0,0 +1,30 @@
+var server = require('./server')
+ , events = require('events')
+ , assert = require('assert')
+ , request = require('../main.js')
+ ;
+
+var local = 'http://localhost:8888/asdf'
+
+try {
+ request({uri:local, body:{}})
+ assert.fail("Should have throw")
+} catch(e) {
+ assert.equal(e.message, 'Argument error, options.body.')
+}
+
+try {
+ request({uri:local, multipart: 'foo'})
+ assert.fail("Should have throw")
+} catch(e) {
+ assert.equal(e.message, 'Argument error, options.multipart.')
+}
+
+try {
+ request({uri:local, multipart: [{}]})
+ assert.fail("Should have throw")
+} catch(e) {
+ assert.equal(e.message, 'Body attribute missing in multipart.')
+}
+
+console.log("All tests passed.") \ No newline at end of file
diff --git a/deps/npm/node_modules/request/tests/test-pipes.js b/deps/npm/node_modules/request/tests/test-pipes.js
new file mode 100644
index 0000000000..0774647a17
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/test-pipes.js
@@ -0,0 +1,167 @@
+var server = require('./server')
+ , events = require('events')
+ , stream = require('stream')
+ , assert = require('assert')
+ , fs = require('fs')
+ , request = require('../main.js')
+ , path = require('path')
+ , util = require('util')
+ ;
+
+var s = server.createServer(3453);
+
+function ValidationStream(str) {
+ this.str = str
+ this.buf = ''
+ this.on('data', function (data) {
+ this.buf += data
+ })
+ this.on('end', function () {
+ assert.equal(this.str, this.buf)
+ })
+ this.writable = true
+}
+util.inherits(ValidationStream, stream.Stream)
+ValidationStream.prototype.write = function (chunk) {
+ this.emit('data', chunk)
+}
+ValidationStream.prototype.end = function (chunk) {
+ if (chunk) emit('data', chunk)
+ this.emit('end')
+}
+
s.listen(s.port, function () {
  // BUG FIX: `counter` was assigned without `var`, leaking an implicit
  // global. It tracks how many async sub-tests are still outstanding.
  var counter = 0;

  // Each sub-test calls check() exactly once when it finishes; when the
  // last one completes, report success and exit after a grace period.
  var check = function () {
    counter = counter - 1
    if (counter === 0) {
      console.log('All tests passed.')
      setTimeout(function () {
        process.exit();
      }, 500)
    }
  }

  // Test pipeing to a request object
  s.once('/push', server.createPostValidator("mydata"));

  var mydata = new stream.Stream();
  mydata.readable = true

  counter++
  var r1 = request.put({url:'http://localhost:3453/push'}, function () {
    check();
  })
  mydata.pipe(r1)

  mydata.emit('data', 'mydata');
  mydata.emit('end');


  // Test pipeing from a request object.
  s.once('/pull', server.createGetResponse("mypulldata"));

  var mypulldata = new stream.Stream();
  mypulldata.writable = true

  counter++
  request({url:'http://localhost:3453/pull'}).pipe(mypulldata)

  var d = '';

  mypulldata.write = function (chunk) {
    d += chunk;
  }
  mypulldata.end = function () {
    assert.equal(d, 'mypulldata');
    check();
  };


  // GET serves a fixed 4-byte body; PUT validates that piped-through
  // headers and body arrive intact.
  s.on('/cat', function (req, resp) {
    if (req.method === "GET") {
      resp.writeHead(200, {'content-type':'text/plain-test', 'content-length':4});
      resp.end('asdf')
    } else if (req.method === "PUT") {
      assert.equal(req.headers['content-type'], 'text/plain-test');
      assert.equal(req.headers['content-length'], 4)
      var validate = '';

      req.on('data', function (chunk) {validate += chunk})
      req.on('end', function () {
        resp.writeHead(201);
        resp.end();
        assert.equal(validate, 'asdf');
        check();
      })
    }
  })
  s.on('/pushjs', function (req, resp) {
    if (req.method === "PUT") {
      assert.equal(req.headers['content-type'], 'text/javascript');
      check();
    }
  })
  // Proxy the /cat response body through a second request.
  s.on('/catresp', function (req, resp) {
    request.get('http://localhost:3453/cat').pipe(resp)
  })
  s.on('/doodle', function (req, resp) {
    if (req.headers['x-oneline-proxy']) {
      resp.setHeader('x-oneline-proxy', 'yup')
    }
    resp.writeHead('200', {'content-type':'image/png'})
    fs.createReadStream(path.join(__dirname, 'googledoodle.png')).pipe(resp)
  })
  // One-line proxy: pipe the incoming request out and the response back.
  s.on('/onelineproxy', function (req, resp) {
    var x = request('http://localhost:3453/doodle')
    req.pipe(x)
    x.pipe(resp)
  })

  counter++
  fs.createReadStream(__filename).pipe(request.put('http://localhost:3453/pushjs'))

  counter++
  request.get('http://localhost:3453/cat').pipe(request.put('http://localhost:3453/cat'))

  counter++
  request.get('http://localhost:3453/catresp', function (e, resp, body) {
    assert.equal(resp.headers['content-type'], 'text/plain-test');
    assert.equal(resp.headers['content-length'], 4)
    check();
  })

  var doodleWrite = fs.createWriteStream(path.join(__dirname, 'test.png'))

  counter++
  request.get('http://localhost:3453/doodle').pipe(doodleWrite)

  doodleWrite.on('close', function () {
    assert.deepEqual(fs.readFileSync(path.join(__dirname, 'googledoodle.png')), fs.readFileSync(path.join(__dirname, 'test.png')))
    check()
  })

  // clean up the file written by the pipe-to-disk test
  process.on('exit', function () {
    fs.unlinkSync(path.join(__dirname, 'test.png'))
  })

  counter++
  request.get({uri:'http://localhost:3453/onelineproxy', headers:{'x-oneline-proxy':'nope'}}, function (err, resp, body) {
    assert.equal(resp.headers['x-oneline-proxy'], 'yup')
    check()
  })

  s.on('/afterresponse', function (req, resp) {
    resp.write('d')
    resp.end()
  })

  // Piping a request AFTER its 'response' event must still deliver data.
  counter++
  var afterresp = request.post('http://localhost:3453/afterresponse').on('response', function () {
    var v = new ValidationStream('d')
    afterresp.pipe(v)
    v.on('end', check)
  })

})
diff --git a/deps/npm/node_modules/request/tests/test-timeout.js b/deps/npm/node_modules/request/tests/test-timeout.js
new file mode 100644
index 0000000000..673f8ad86f
--- /dev/null
+++ b/deps/npm/node_modules/request/tests/test-timeout.js
@@ -0,0 +1,87 @@
// Tests for the `timeout` option: a route delays its response by 200ms
// and requests are issued with timeouts shorter and longer than that.
var server = require('./server')
  , events = require('events')
  , stream = require('stream')
  , assert = require('assert')
  , request = require('../main.js')
  ;

var s = server.createServer();
var expectedBody = "waited";
var remainingTests = 5;

s.listen(s.port, function () {
  // Request that waits for 200ms
  s.on('/timeout', function (req, resp) {
    setTimeout(function(){
      resp.writeHead(200, {'content-type':'text/plain'})
      resp.write(expectedBody)
      resp.end()
    }, 200);
  });

  // Scenario that should timeout
  var shouldTimeout = {
    url: s.url + "/timeout",
    timeout:100
  }


  request(shouldTimeout, function (err, resp, body) {
    assert.equal(err.code, "ETIMEDOUT");
    checkDone();
  })


  // Scenario that shouldn't timeout
  var shouldntTimeout = {
    url: s.url + "/timeout",
    timeout:300
  }

  request(shouldntTimeout, function (err, resp, body) {
    assert.equal(err, null);
    assert.equal(expectedBody, body)
    checkDone();
  })

  // Scenario with no timeout set, so shouldn't timeout
  var noTimeout = {
    url: s.url + "/timeout"
  }

  request(noTimeout, function (err, resp, body) {
    // BUG FIX: was `assert.equal(err)`, which silently compared err to
    // `undefined`. State the intent explicitly, matching the other
    // scenarios.
    assert.equal(err, null);
    assert.equal(expectedBody, body)
    checkDone();
  })

  // Scenario with a negative timeout value, should be treated a zero or the minimum delay
  var negativeTimeout = {
    url: s.url + "/timeout",
    timeout:-1000
  }

  request(negativeTimeout, function (err, resp, body) {
    assert.equal(err.code, "ETIMEDOUT");
    checkDone();
  })

  // Scenario with a float timeout value, should be rounded by setTimeout anyway
  var floatTimeout = {
    url: s.url + "/timeout",
    timeout: 100.76
  }

  request(floatTimeout, function (err, resp, body) {
    assert.equal(err.code, "ETIMEDOUT");
    checkDone();
  })

  // Close the server and report once all five scenarios have finished.
  function checkDone() {
    if(--remainingTests == 0) {
      s.close();
      console.log("All tests passed.");
    }
  }
})
+
diff --git a/deps/npm/node_modules/rimraf/AUTHORS b/deps/npm/node_modules/rimraf/AUTHORS
new file mode 100644
index 0000000000..008cbe7dd9
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/AUTHORS
@@ -0,0 +1,5 @@
+# Authors sorted by whether or not they're me.
+Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)
+Wayne Larsen <wayne@larsen.st> (http://github.com/wvl)
+ritch <skawful@gmail.com>
+Marcel Laverdet
diff --git a/deps/npm/node_modules/rimraf/LICENSE b/deps/npm/node_modules/rimraf/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/rimraf/README.md b/deps/npm/node_modules/rimraf/README.md
new file mode 100644
index 0000000000..99983dc437
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/README.md
@@ -0,0 +1,32 @@
+A `rm -rf` for node.
+
+Install with `npm install rimraf`, or just drop rimraf.js somewhere.
+
+## API
+
+`rimraf(f, [options,] callback)`
+
+The callback will be called with an error if there is one. Certain
+errors are handled for you:
+
+* `EBUSY` - rimraf will back off a maximum of opts.maxBusyTries times
+ before giving up.
+* `EMFILE` - If too many file descriptors get opened, rimraf will
+ patiently wait until more become available.
+
+## Options
+
+The options object is optional. These fields are respected:
+
+* `maxBusyTries` - The number of times to retry a file or folder in the
+ event of an `EBUSY` error. The default is 3.
+* `gently` - If provided a `gently` path, then rimraf will only delete
+ files and folders that are beneath this path, and only delete symbolic
+ links that point to a place within this path. (This is very important
+ to npm's use-case, and shows rimraf's pedigree.)
+
+
+## rimraf.sync
+
+It can remove stuff synchronously, too. But that's not so good. Use
+the async API. It's better.
diff --git a/deps/npm/node_modules/rimraf/fiber.js b/deps/npm/node_modules/rimraf/fiber.js
new file mode 100644
index 0000000000..8812a6b449
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/fiber.js
@@ -0,0 +1,86 @@
+// fiber/future port originally written by Marcel Laverdet
+// https://gist.github.com/1131093
+// I updated it to bring to feature parity with cb version.
+// The bugs are probably mine, not Marcel's.
+// -- isaacs
+
+var path = require('path')
+ , fs = require('fs')
+ , Future = require('fibers/future')
+
+// Create future-returning fs functions
+var fs2 = {}
+for (var ii in fs) {
+ fs2[ii] = Future.wrap(fs[ii])
+}
+
+// Return a future which just pauses for a certain amount of time
+
// Return a future that resolves (with no value) after `ms` milliseconds.
function timer (ms) {
  var future = new Future
  setTimeout(function () { future.return() }, ms)
  return future
}
+
// Resolve where a symlink actually points, relative to the link's own
// directory (mirrors realish() in the callback-based rimraf.js).
function realish (p) {
  // BUG FIX: fs2.readlink returns a Future, so it must be .wait()ed,
  // and the link target must be resolved against dirname(p) — not
  // wrapped inside dirname itself.
  return path.resolve(path.dirname(p), fs2.readlink(p).wait())
}
+
+// for EMFILE backoff.
+var timeout = 0
+ , EMFILE_MAX = 1000
+
// Fiber-based recursive delete of `p`. Retries EBUSY up to
// opts.maxBusyTries times, backs off on EMFILE, treats ENOENT as
// success, and (with opts.gently) refuses to delete outside that root.
function rimraf_ (p, opts) {
  opts = opts || {}
  opts.maxBusyTries = opts.maxBusyTries || 3
  if (opts.gently) opts.gently = path.resolve(opts.gently)
  var busyTries = 0

  // exits by throwing or returning.
  // loops on handled errors.
  while (true) {
    try {
      var stat = fs2.lstat(p).wait()

      // check to make sure that symlinks are ours.
      if (opts.gently) {
        var rp = stat.isSymbolicLink() ? realish(p) : path.resolve(p)
        if (rp.indexOf(opts.gently) !== 0) {
          var er = new Error("Refusing to delete: "+p+" not in "+opts.gently)
          er.errno = require("constants").EEXIST
          er.code = "EEXIST"
          er.path = p
          throw er
        }
      }

      if (!stat.isDirectory()) return fs2.unlink(p).wait()

      // delete all children in parallel, then the directory itself
      var rimrafs = fs2.readdir(p).wait().map(function (file) {
        return rimraf(path.join(p, file), opts)
      })

      Future.wait(rimrafs)
      fs2.rmdir(p).wait()
      timeout = 0
      return

    } catch (er) {
      if (er.message.match(/^EMFILE/) && timeout < EMFILE_MAX) {
        timer(timeout++).wait()
      } else if (er.message.match(/^EBUSY/)
                 // BUG FIX: was `opt.maxBusyTries` — a ReferenceError
                 // whenever EBUSY was actually hit. The parameter is
                 // named `opts`.
                 && busyTries < opts.maxBusyTries) {
        timer(++busyTries * 100).wait()
      } else if (er.message.match(/^ENOENT/)) {
        // already gone
        return
      } else {
        throw er
      }
    }
  }
}
+
+var rimraf = module.exports = rimraf_.future()
diff --git a/deps/npm/node_modules/rimraf/package.json b/deps/npm/node_modules/rimraf/package.json
new file mode 100644
index 0000000000..5b48d5bb2e
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/package.json
@@ -0,0 +1,9 @@
+{"name":"rimraf"
+,"version":"1.0.8"
+,"main":"rimraf.js"
+,"description":"A deep deletion module for node (like `rm -rf`)"
+,"author":"Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)"
+,"license":
+ {"type":"MIT", "url": "https://github.com/isaacs/rimraf/raw/master/LICENSE"}
+,"repository":"git://github.com/isaacs/rimraf.git"
+,"scripts":{"test":"cd test && bash run.sh"}}
diff --git a/deps/npm/node_modules/rimraf/rimraf.js b/deps/npm/node_modules/rimraf/rimraf.js
new file mode 100644
index 0000000000..27e22669e3
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/rimraf.js
@@ -0,0 +1,140 @@
+module.exports = rimraf
+rimraf.sync = rimrafSync
+
+var path = require("path")
+ , fs
+
+try {
+ // optional dependency
+ fs = require("graceful-fs")
+} catch (er) {
+ fs = require("fs")
+}
+
+var lstat = process.platform === "win32" ? "stat" : "lstat"
+ , lstatSync = lstat + "Sync"
+
+// for EMFILE handling
+var timeout = 0
+ , EMFILE_MAX = 1000
+
// Asynchronously, recursively delete `p`. Retries EBUSY (up to
// opts.maxBusyTries, with linear backoff) and EMFILE (with an
// increasing delay); ENOENT is treated as success.
function rimraf (p, opts, cb) {
  if (typeof opts === "function") cb = opts, opts = {}

  if (!cb) throw new Error("No callback passed to rimraf()")
  if (!opts) opts = {}

  var busyTries = 0
  opts.maxBusyTries = opts.maxBusyTries || 3

  if (opts.gently) opts.gently = path.resolve(opts.gently)

  rimraf_(p, opts, function CB (er) {
    if (er) {
      if (er.message.match(/^EBUSY/) && busyTries < opts.maxBusyTries) {
        var time = (opts.maxBusyTries - busyTries) * 100
        busyTries ++
        // try again, with the same exact callback as this one.
        // BUG FIX: `time` was computed but never passed to setTimeout,
        // so EBUSY retries fired immediately instead of backing off.
        return setTimeout(function () {
          rimraf_(p, opts, CB)
        }, time)
      }

      // this one won't happen if graceful-fs is used.
      if (er.message.match(/^EMFILE/) && timeout < EMFILE_MAX) {
        return setTimeout(function () {
          rimraf_(p, opts, CB)
        }, timeout ++)
      }

      // already gone
      if (er.message.match(/^ENOENT/)) er = null
    }

    timeout = 0
    cb(er)
  })
}
+
// Stat `p` and dispatch to the appropriate removal strategy. A missing
// path counts as success; any other stat error is passed through.
function rimraf_ (p, opts, cb) {
  fs[lstat](p, function (er, s) {
    if (er) {
      if (er.message.match(/^ENOENT/)) return cb()  // already gone
      return cb(er)  // permissions or some other real failure
    }

    // With opts.gently set, refuse to follow links out of the safe root.
    return opts.gently ? clobberTest(p, s, opts, cb)
                       : rm_(p, s, opts, cb)
  })
}
+
// Remove `p` given its stat `s`: unlink plain files directly, or
// recursively rimraf a directory's children before rmdir'ing it.
function rm_ (p, s, opts, cb) {
  if (!s.isDirectory()) return fs.unlink(p, cb)

  fs.readdir(p, function (er, files) {
    if (er) return cb(er)
    var children = files.map(function (f) { return path.join(p, f) })
    asyncForEach(children, function (child, done) {
      rimraf(child, opts, done)
    }, function (er) {
      if (er) return cb(er)
      fs.rmdir(p, cb)
    })
  })
}
+
// Guarded removal for opts.gently: only delete p if it (or, for a
// symlink, its target) lives under the protected root.
function clobberTest (p, s, opts, cb) {
  var gently = opts.gently
  if (!s.isSymbolicLink()) next(null, path.resolve(p))
  else realish(p, next)

  function next (er, rp) {
    // BUG FIX: rm_ takes (p, s, opts, cb); `opts` was being dropped on
    // the readlink-error path, so `cb` arrived in the opts position.
    if (er) return rm_(p, s, opts, cb)
    if (rp.indexOf(gently) !== 0) return clobberFail(p, gently, cb)
    else return rm_(p, s, opts, cb)
  }
}
+
// Read the target of symlink `p` and resolve it against the link's own
// directory, yielding the path used for the containment check.
function realish (p, cb) {
  fs.readlink(p, function (er, target) {
    if (er) return cb(er)
    cb(null, path.resolve(path.dirname(p), target))
  })
}
+
// Build the EEXIST "refusing to delete" error for a path that points
// outside the protected `gently` root, and hand it to the callback.
function clobberFail (p, g, cb) {
  var constants = require("constants")
  var er = new Error("Refusing to delete: "+p+" not in "+g)
  er.errno = constants.EEXIST
  er.code = "EEXIST"
  er.path = p
  return cb(er)
}
+
// Run `fn(item, done)` over every item in `list`, then call `cb` once:
// with the first error reported, or with no argument after all items
// complete. Items are started in order but may finish in any order.
function asyncForEach (list, fn, cb) {
  // BUG FIX: return after the empty-list callback so execution never
  // falls through into the bookkeeping below.
  if (!list.length) return cb()
  var c = list.length
    , errState = null
  list.forEach(function (item, i, list) {
    fn(item, function (er) {
      if (errState) return     // an error already won; ignore the rest
      if (er) return cb(errState = er)
      if (-- c === 0) return cb()
    })
  })
}
+
+// this looks simpler, but it will fail with big directory trees,
+// or on slow stupid awful cygwin filesystems
// Synchronous recursive delete. Simpler than the async path, but has
// none of its EBUSY/EMFILE resilience — prefer the async API.
function rimrafSync (p) {
  var s = fs[lstatSync](p)
  if (!s.isDirectory()) return fs.unlinkSync(p)
  // depth-first: empty the directory, then remove it
  var entries = fs.readdirSync(p)
  for (var i = 0; i < entries.length; i++) {
    rimrafSync(path.join(p, entries[i]))
  }
  fs.rmdirSync(p)
}
diff --git a/deps/npm/node_modules/rimraf/test/run.sh b/deps/npm/node_modules/rimraf/test/run.sh
new file mode 100644
index 0000000000..598f0163b2
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/test/run.sh
@@ -0,0 +1,10 @@
#!/bin/bash
# Run every rimraf test: rebuild the fixture tree with setup.sh, run the
# test, then assert the ./target directory is gone afterwards.
set -e
for i in test-*.js; do
  echo -n $i ...
  bash setup.sh
  node $i
  ! [ -d target ]
  echo "pass"
done
rm -rf target
diff --git a/deps/npm/node_modules/rimraf/test/setup.sh b/deps/npm/node_modules/rimraf/test/setup.sh
new file mode 100644
index 0000000000..2602e63160
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/test/setup.sh
@@ -0,0 +1,47 @@
#!/bin/bash

# Build a fixture tree under ./target for the rimraf tests:
# $files files per level, $folders subfolders per level, $depth deep.

set -e

files=10
folders=2
depth=4
target="$PWD/target"

rm -rf target

fill () {
  local depth=$1
  local files=$2
  local folders=$3
  local target=$4

  if ! [ -d $target ]; then
    mkdir -p $target
  fi

  local f

  f=$files
  while [ $f -gt 0 ]; do
    touch "$target/f-$depth-$f"
    let f--
  done

  let depth--

  if [ $depth -le 0 ]; then
    return 0
  fi

  f=$folders
  while [ $f -gt 0 ]; do
    # BUG FIX: the directory created here ("folder-...") did not match
    # the one recursed into ("d-..."), leaving stray empty folders at
    # every level. Create and fill the same directory.
    mkdir "$target/folder-$depth-$f"
    fill $depth $files $folders "$target/folder-$depth-$f"
    let f--
  done
}

fill $depth $files $folders $target

# sanity assert
[ -d $target ]
diff --git a/deps/npm/node_modules/rimraf/test/test-async.js b/deps/npm/node_modules/rimraf/test/test-async.js
new file mode 100644
index 0000000000..9c2e0b7be0
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/test/test-async.js
@@ -0,0 +1,5 @@
// Async rimraf of the fixture tree built by setup.sh; run.sh asserts
// the directory is gone afterwards.
var rimraf = require("../rimraf")
  , path = require("path")
rimraf(path.join(__dirname, "target"), function (er) {
  if (er) throw er
})
diff --git a/deps/npm/node_modules/rimraf/test/test-fiber.js b/deps/npm/node_modules/rimraf/test/test-fiber.js
new file mode 100644
index 0000000000..20d61a1099
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/test/test-fiber.js
@@ -0,0 +1,15 @@
var rimraf
  , path = require("path")

// The fiber implementation needs the optional `fibers` dependency;
// skip this test when it isn't installed.
try {
  rimraf = require("../fiber")
} catch (er) {
  console.error("skipping fiber test")
}

if (rimraf) {
  // NOTE(review): relies on `Fiber` being global as a side effect of
  // loading ../fiber (which requires fibers/future) — confirm.
  Fiber(function () {
    rimraf(path.join(__dirname, "target")).wait()
  }).run()
}
+
diff --git a/deps/npm/node_modules/rimraf/test/test-sync.js b/deps/npm/node_modules/rimraf/test/test-sync.js
new file mode 100644
index 0000000000..eb71f10476
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/test/test-sync.js
@@ -0,0 +1,3 @@
// Synchronous rimraf of the fixture tree built by setup.sh.
var rimraf = require("../rimraf")
  , path = require("path")
rimraf.sync(path.join(__dirname, "target"))
diff --git a/deps/npm/node_modules/semver/LICENSE b/deps/npm/node_modules/semver/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/semver/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/semver/README.md b/deps/npm/node_modules/semver/README.md
new file mode 100644
index 0000000000..6fa37a3d89
--- /dev/null
+++ b/deps/npm/node_modules/semver/README.md
@@ -0,0 +1,119 @@
+semver(1) -- The semantic versioner for npm
+===========================================
+
+## Usage
+
+ $ npm install semver
+
    semver.valid('1.2.3') // '1.2.3'
    semver.valid('a.b.c') // null
+ semver.clean(' =v1.2.3 ') // '1.2.3'
+ semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true
+ semver.gt('1.2.3', '9.8.7') // false
+ semver.lt('1.2.3', '9.8.7') // true
+
+As a command-line utility:
+
+ $ semver -h
+
+ Usage: semver -v <version> [-r <range>]
+ Test if version(s) satisfy the supplied range(s),
+ and sort them.
+
+ Multiple versions or ranges may be supplied.
+
+ Program exits successfully if any valid version satisfies
+ all supplied ranges, and prints all satisfying versions.
+
+ If no versions are valid, or ranges are not satisfied,
+ then exits failure.
+
+ Versions are printed in ascending order, so supplying
+ multiple versions to the utility will just sort them.
+
+## Versions
+
+A version is the following things, in this order:
+
+* a number (Major)
+* a period
+* a number (minor)
+* a period
+* a number (patch)
+* OPTIONAL: a hyphen, followed by a number (build)
+* OPTIONAL: a collection of pretty much any non-whitespace characters
+ (tag)
+
+A leading `"="` or `"v"` character is stripped off and ignored.
+
+## Comparisons
+
+The ordering of versions is done using the following algorithm, given
+two versions and asked to find the greater of the two:
+
+* If the majors are numerically different, then take the one
+ with a bigger major number. `2.3.4 > 1.3.4`
+* If the minors are numerically different, then take the one
+ with the bigger minor number. `2.3.4 > 2.2.4`
+* If the patches are numerically different, then take the one with the
+ bigger patch number. `2.3.4 > 2.3.3`
+* If only one of them has a build number, then take the one with the
+ build number. `2.3.4-0 > 2.3.4`
+* If they both have build numbers, and the build numbers are numerically
+ different, then take the one with the bigger build number.
+ `2.3.4-10 > 2.3.4-9`
+* If only one of them has a tag, then take the one without the tag.
+ `2.3.4 > 2.3.4-beta`
+* If they both have tags, then take the one with the lexicographically
+ larger tag. `2.3.4-beta > 2.3.4-alpha`
+* At this point, they're equal.
+
+## Ranges
+
+The following range styles are supported:
+
+* `>1.2.3` Greater than a specific version.
+* `<1.2.3` Less than
+* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4`
+* `~1.2.3` := `>=1.2.3 <1.3.0`
+* `~1.2` := `>=1.2.0 <2.0.0`
+* `~1` := `>=1.0.0 <2.0.0`
+* `1.2.x` := `>=1.2.0 <1.3.0`
+* `1.x` := `>=1.0.0 <2.0.0`
+
+Ranges can be joined with either a space (which implies "and") or a
+`||` (which implies "or").
+
+## Functions
+
+* valid(v): Return the parsed version, or null if it's not valid.
+* inc(v, release): Return the version incremented by the release type
+ (major, minor, patch, or build), or null if it's not valid.
+
+### Comparison
+
+* gt(v1, v2): `v1 > v2`
+* gte(v1, v2): `v1 >= v2`
+* lt(v1, v2): `v1 < v2`
+* lte(v1, v2): `v1 <= v2`
+* eq(v1, v2): `v1 == v2` This is true if they're logically equivalent,
+ even if they're not the exact same string. You already know how to
+ compare strings.
+* neq(v1, v2): `v1 != v2` The opposite of eq.
+* cmp(v1, comparator, v2): Pass in a comparison string, and it'll call
+ the corresponding function above. `"==="` and `"!=="` do simple
+ string comparison, but are included for completeness. Throws if an
+ invalid comparison string is provided.
+* compare(v1, v2): Return 0 if v1 == v2, or 1 if v1 is greater, or -1 if
+ v2 is greater. Sorts in ascending order if passed to Array.sort().
+* rcompare(v1, v2): The reverse of compare. Sorts an array of versions
+ in descending order when passed to Array.sort().
+
+
+### Ranges
+
+* validRange(range): Return the valid range or null if it's not valid
+* satisfies(version, range): Return true if the version satisfies the
+ range.
+* maxSatisfying(versions, range): Return the highest version in the list
+ that satisfies the range, or null if none of them do.
diff --git a/deps/npm/node_modules/semver/bin/semver b/deps/npm/node_modules/semver/bin/semver
new file mode 100755
index 0000000000..3e6afb40d8
--- /dev/null
+++ b/deps/npm/node_modules/semver/bin/semver
@@ -0,0 +1,71 @@
+#!/usr/bin/env node
+// Standalone semver comparison program.
+// Exits successfully and prints matching version(s) if
+// any supplied version is valid and passes all tests.
+
+var argv = process.argv.slice(2)
+ , versions = []
+ , range = []
+ , gt = []
+ , lt = []
+ , eq = []
+ , semver = require("../semver")
+
+main()
+
// Parse argv: -v/--version values and bare arguments are versions,
// -r/--range values are ranges, -h prints usage. Then filter the
// versions through every range and print the survivors (or exit 1).
function main () {
  if (!argv.length) return help()
  while (argv.length) {
    var a
    switch (a = argv.shift()) {
      case "-v": case "--version":
        versions.push(argv.shift())
        break
      case "-r": case "--range":
        range.push(argv.shift())
        break
      case "-h": case "--help": case "-?":
        return help()
      default:
        versions.push(a)
        break
    }
  }

  // drop anything that isn't a valid semver before range filtering
  versions = versions.filter(semver.valid)
  for (var i = 0, l = range.length; i < l ; i ++) {
    versions = versions.filter(function (v) {
      return semver.satisfies(v, range[i])
    })
    if (!versions.length) return fail()
  }
  return success(versions)
}
+
// Exit non-zero: no version satisfied all supplied ranges.
function fail () { process.exit(1) }

// Print the surviving versions, cleaned, in ascending semver order.
// (Reads the module-level `versions` array.)
function success () {
  var sorted = versions.sort(semver.compare).map(semver.clean)
  sorted.forEach(function (v) { console.log(v) })
}
+
// Print CLI usage text (kept in sync with the README's usage section).
function help () {
  console.log(["Usage: semver -v <version> [-r <range>]"
              ,"Test if version(s) satisfy the supplied range(s),"
              ,"and sort them."
              ,""
              ,"Multiple versions or ranges may be supplied."
              ,""
              ,"Program exits successfully if any valid version satisfies"
              ,"all supplied ranges, and prints all satisfying versions."
              ,""
              ,"If no versions are valid, or ranges are not satisfied,"
              ,"then exits failure."
              ,""
              ,"Versions are printed in ascending order, so supplying"
              ,"multiple versions to the utility will just sort them."
              ].join("\n"))
}
+
+
diff --git a/deps/npm/node_modules/semver/package.json b/deps/npm/node_modules/semver/package.json
new file mode 100644
index 0000000000..cd10e341ce
--- /dev/null
+++ b/deps/npm/node_modules/semver/package.json
@@ -0,0 +1,11 @@
+{ "name" : "semver"
+, "version" : "1.0.12"
+, "description" : "The semantic version parser used by npm."
+, "main" : "semver.js"
+, "scripts" : { "test" : "tap test.js" }
+, "devDependencies": { "tap" : "0.x >=0.0.4" }
+, "license" :
+ { "type" : "MIT"
+ , "url" : "https://github.com/isaacs/semver/raw/master/LICENSE" }
+, "repository" : "git://github.com/isaacs/node-semver.git"
+, "bin" : { "semver" : "./bin/semver" } }
diff --git a/deps/npm/node_modules/semver/semver.js b/deps/npm/node_modules/semver/semver.js
new file mode 100644
index 0000000000..789b118b9f
--- /dev/null
+++ b/deps/npm/node_modules/semver/semver.js
@@ -0,0 +1,303 @@
+
+// See http://semver.org/
+// This implementation is a *hair* less strict in that it allows
+// v1.2.3 things, and also tags that don't begin with a char.
+
// Building blocks for the version grammar; `semver` is the full version
// pattern (major.minor.patch with optional build and tag captures).
var semver = "\\s*[v=]*\\s*([0-9]+)" // major
           + "\\.([0-9]+)" // minor
           + "\\.([0-9]+)" // patch
           + "(-[0-9]+-?)?" // build
           + "([a-zA-Z-][a-zA-Z0-9-\.:]*)?" // tag
  // BUG FIX: "\s" inside a string literal is just the letter "s", so
  // this pattern previously matched a literal run of "s" characters
  // instead of optional whitespace after the operator. Escape the
  // backslash so the built RegExp sees \s*.
  , exprComparator = "^((<|>)?=?)\\s*("+semver+")$|^$"
  , xRangePlain = "[v=]*([0-9]+|x|X|\\*)"
              + "(?:\\.([0-9]+|x|X|\\*)"
              + "(?:\\.([0-9]+|x|X|\\*)"
              + "([a-zA-Z-][a-zA-Z0-9-\.:]*)?)?)?"
  , xRange = "((?:<|>)?=?)?\\s*" + xRangePlain
  , exprSpermy = "(?:~>?)"+xRange
  , expressions = exports.expressions =
    { parse : new RegExp("^\\s*"+semver+"\\s*$")
    , parsePackage : new RegExp("^\\s*([^\/]+)[-@](" +semver+")\\s*$")
    , parseRange : new RegExp(
        "^\\s*(" + semver + ")\\s+-\\s+(" + semver + ")\\s*$")
    , validComparator : new RegExp("^"+exprComparator+"$")
    , parseXRange : new RegExp("^"+xRange+"$")
    , parseSpermy : new RegExp("^"+exprSpermy+"$")
    }


// Export a matcher for each expression: exports.parse(str) etc. return
// the RegExp match array, or null for no match / non-string input.
Object.getOwnPropertyNames(expressions).forEach(function (i) {
  exports[i] = function (str) {
    return ("" + (str || "")).match(expressions[i])
  }
})
+
+exports.rangeReplace = ">=$1 <=$7"
+exports.clean = clean
+exports.compare = compare
+exports.satisfies = satisfies
+exports.gt = gt
+exports.gte = gte
+exports.lt = lt
+exports.lte = lte
+exports.eq = eq
+exports.neq = neq
+exports.cmp = cmp
+exports.inc = inc
+
+exports.valid = valid
+exports.validPackage = validPackage
+exports.validRange = validRange
+exports.maxSatisfying = maxSatisfying
+
+exports.replaceStars = replaceStars
+exports.toComparators = toComparators
+
// Reassemble a parsed version array (as produced by exports.parse)
// back into "major.minor.patch" plus any build and tag suffixes.
function stringify (version) {
  var major = version[1] || ''
    , minor = version[2] || ''
    , patch = version[3] || ''
    , build = version[4] || ''
    , tag   = version[5] || ''
  return major + "." + minor + "." + patch + build + tag
}
+
// Canonicalize a version string via parse + stringify; returns the
// falsy parse result unchanged when the input isn't a valid version.
function clean (version) {
  var parsed = exports.parse(version)
  return parsed ? stringify(parsed) : parsed
}
+
// Return the trimmed, "v"/"="-stripped version string when it parses
// as a semver, null for non-strings, or the falsy parse result.
function valid (version) {
  if (typeof version !== "string") return null
  var parsed = exports.parse(version)
  return parsed && version.trim().replace(/^[v=]+/, '')
}
+
// A "package" string is "name@version" or "name-version"; return the
// trimmed input when it matches, null for non-strings or mismatches.
function validPackage (version) {
  if (typeof version !== "string") return null
  var m = version.match(expressions.parsePackage)
  return m && version.trim()
}
+
+// range can be one of:
+// "1.0.3 - 2.0.0" range, inclusive, like ">=1.0.3 <=2.0.0"
+// ">1.0.2" like 1.0.3 - 9999.9999.9999
+// ">=1.0.2" like 1.0.2 - 9999.9999.9999
+// "<2.0.0" like 0.0.0 - 1.9999.9999
+// ">1.0.2 <2.0.0" like 1.0.3 - 1.9999.9999
+var starExpression = /(<|>)?=?\s*\*/g
+ , starReplace = ""
+ , compTrimExpression = new RegExp("((<|>)?=?)\\s*("
+ +semver+"|"+xRangePlain+")", "g")
+ , compTrimReplace = "$1$3"
+
// Turn a range string into an array of OR-groups, each an array of
// AND'ed comparator strings: e.g. ">=1.2.3 <2.0.0 || 3.x" becomes
// [[">=1.2.3", "<2.0.0"], [">=3.0.0-", "<4.0.0-"]].
function toComparators (range) {
  var ret = (range || "").trim()
    .replace(expressions.parseRange, exports.rangeReplace) // "a - b" -> ">=a <=b"
    .replace(compTrimExpression, compTrimReplace) // drop space after operators
    .split(/\s+/)
    .join(" ")
    .split("||")
    .map(function (orchunk) {
      // expand x-ranges, ~ ("spermy") ranges, and bare stars
      return orchunk
        .split(" ")
        .map(replaceXRanges)
        .map(replaceSpermies)
        .map(replaceStars)
        .join(" ").trim()
    })
    .map(function (orchunk) {
      // keep only chunks that are syntactically valid comparators
      return orchunk
        .trim()
        .split(/\s+/)
        .filter(function (c) { return c.match(expressions.validComparator) })
    })
    .filter(function (c) { return c.length })
  return ret
}
+
// Strip "*" comparators (optionally prefixed with <, >, =) entirely —
// a star imposes no constraint.
function replaceStars (stars) {
  var trimmed = stars.trim()
  return trimmed.replace(starExpression, starReplace)
}
+
+// "2.x","2.x.x" --> ">=2.0.0- <2.1.0-"
+// "2.3.x" --> ">=2.3.0- <2.4.0-"
// Apply replaceXRange to every whitespace-separated comparator.
function replaceXRanges (ranges) {
  var parts = ranges.split(/\s+/)
  return parts.map(replaceXRange).join(" ")
}
+
// Expand one x-range comparator ("1.2.x", ">=1.x", "2.*", ...) into
// explicit comparators. The trailing "-" keeps tagged prereleases of
// the boundary versions from matching.
function replaceXRange (version) {
  return version.trim().replace(expressions.parseXRange,
    function (v, gtlt, M, m, p, t) {
      // is any of major/minor/patch a wildcard (x, X, *) or absent?
      var anyX = !M || M.toLowerCase() === "x" || M === "*"
              || !m || m.toLowerCase() === "x" || m === "*"
              || !p || p.toLowerCase() === "x" || p === "*"
        , ret = v

      if (gtlt && anyX) {
        // just replace x'es with zeroes
        ;(!M || M === "*" || M.toLowerCase() === "x") && (M = 0)
        ;(!m || m === "*" || m.toLowerCase() === "x") && (m = 0)
        ;(!p || p === "*" || p.toLowerCase() === "x") && (p = 0)
        ret = gtlt + M+"."+m+"."+p+"-"
      } else if (!M || M === "*" || M.toLowerCase() === "x") {
        ret = "*" // allow any
      } else if (!m || m === "*" || m.toLowerCase() === "x") {
        // append "-" onto the version, otherwise
        // "1.x.x" matches "2.0.0beta", since the tag
        // *lowers* the version value
        ret = ">="+M+".0.0- <"+(+M+1)+".0.0-"
      } else if (!p || p === "*" || p.toLowerCase() === "x") {
        ret = ">="+M+"."+m+".0- <"+M+"."+(+m+1)+".0-"
      }
      //console.error("parseXRange", [].slice.call(arguments), ret)
      return ret
    })
}
+
+// ~, ~> --> * (any, kinda silly)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0
// Expand a "spermy" (~ or ~>) range into explicit >=/< comparators,
// per the table in the comment above. Mixing ~ with </> is an error.
function replaceSpermies (version) {
  return version.trim().replace(expressions.parseSpermy,
    function (v, gtlt, M, m, p, t) {
      if (gtlt) throw new Error(
        "Using '"+gtlt+"' with ~ makes no sense. Don't do it.")

      // ~x (or bare ~) constrains nothing
      if (!M || M.toLowerCase() === "x") {
        return ""
      }
      // ~1 == >=1.0.0- <2.0.0-
      if (!m || m.toLowerCase() === "x") {
        return ">="+M+".0.0- <"+(+M+1)+".0.0-"
      }
      // ~1.2 == >=1.2.0- <1.3.0-
      if (!p || p.toLowerCase() === "x") {
        return ">="+M+"."+m+".0- <"+M+"."+(+m+1)+".0-"
      }
      // ~1.2.3 == >=1.2.3- <1.3.0-
      t = t || "-"
      return ">="+M+"."+m+"."+p+t+" <"+M+"."+(+m+1)+".0-"
    })
}
+
// Normalize a range string into "c1 c2||c3 ..." form; null when
// nothing in it parses as a comparator.
function validRange (range) {
  range = replaceStars(range)
  var comparators = toComparators(range)
  if (comparators.length === 0) return null
  return comparators.map(function (set) { return set.join(" ") }).join("||")
}
+
+// returns the highest satisfying version in the list, or undefined
+function maxSatisfying (versions, range) {
+ return versions
+ .filter(function (v) { return satisfies(v, range) })
+ .sort(compare)
+ .pop()
+}
// True when `version` matches `range`: the range is an OR of
// comparator groups, and every comparator in some group must hold.
function satisfies (version, range) {
  version = valid(version)
  if (!version) return false
  range = toComparators(range)
  for (var i = 0, l = range.length ; i < l ; i ++) {
    var ok = false
    for (var j = 0, ll = range[i].length ; j < ll ; j ++) {
      // split ">=1.2.3" into operator parts; note `eq` here is a local
      // boolean that shadows the module-level eq() function
      var r = range[i][j]
        , gtlt = r.charAt(0) === ">" ? gt
               : r.charAt(0) === "<" ? lt
               : false
        , eq = r.charAt(!!gtlt) === "="
        , sub = (!!eq) + (!!gtlt)
      if (!gtlt) eq = true   // a bare version means exact equality
      r = r.substr(sub)
      r = (r === "") ? r : valid(r)
      // an empty comparator matches anything; otherwise equality or
      // the strict gt/lt test must hold
      ok = (r === "") || (eq && r === version) || (gtlt && gtlt(version, r))
      if (!ok) break
    }
    if (ok) return true
  }
  return false
}
+
+// return v1 > v2 ? 1 : -1
// Three-way comparison: 1 when v1 is newer, -1 when older, 0 when equal.
function compare (v1, v2) {
  var g = gt(v1, v2)
  if (g === null) return 0   // gt's null kludge signals equality
  return g ? 1 : -1
}

// compare() with the arguments flipped, for descending sorts.
function rcompare (v1, v2) {
  return compare(v2, v1)
}
+
// All comparison predicates are defined in terms of gt().
function lt (v1, v2) {
  return gt(v2, v1)
}
function gte (v1, v2) {
  return !lt(v1, v2)
}
function lte (v1, v2) {
  return !gt(v1, v2)
}
// gt() returns null (rather than true/false) exactly when equal.
function eq (v1, v2) {
  return gt(v1, v2) === null
}
function neq (v1, v2) {
  return gt(v1, v2) !== null
}
// Dispatch a comparison by operator string. "===" and "!==" are plain
// string comparisons; any unrecognized operator throws.
function cmp (v1, c, v2) {
  if (c === ">") return gt(v1, v2)
  if (c === "<") return lt(v1, v2)
  if (c === ">=") return gte(v1, v2)
  if (c === "<=") return lte(v1, v2)
  if (c === "==") return eq(v1, v2)
  if (c === "!=") return neq(v1, v2)
  if (c === "===") return v1 === v2
  if (c === "!==") return v1 !== v2
  throw new Error("Y U NO USE VALID COMPARATOR!? "+c)
}
+
+// return v1 > v2
// Numeric value of one parsed-version field: undefined means "absent"
// (-1); otherwise strip non-digits and parse base 10 ("" becomes 0).
function num (v) {
  if (v === undefined) return -1
  var digits = (v || "0").replace(/[^0-9]+/g, '')
  return parseInt(digits, 10)
}
// Strict "v1 > v2". Returns false when either input fails to parse,
// true/false for a numeric or tag difference, and null when the
// versions are fully equal (see the kludge note below).
function gt (v1, v2) {
  v1 = exports.parse(v1)
  v2 = exports.parse(v2)
  if (!v1 || !v2) return false

  // Compare major/minor/patch/build numerically (match slots 1..4).
  // Note: this overwrites the match-array slots with numbers in place.
  for (var i = 1; i < 5; i ++) {
    v1[i] = num(v1[i])
    v2[i] = num(v2[i])
    if (v1[i] > v2[i]) return true
    else if (v1[i] !== v2[i]) return false
  }
  // no tag is > than any tag, or use lexicographical order.
  var tag1 = v1[5] || ""
    , tag2 = v2[5] || ""

  // kludge: null means they were equal. falsey, and detectable.
  // embarrassingly overclever, though, I know.
  return tag1 === tag2 ? null
       : !tag1 ? true
       : !tag2 ? false
       : tag1 > tag2
}
+
// Bump `version` at the given release level ("major", "minor", "patch",
// or "build"): increment that field, zero out lower fields, drop any
// tag. Returns the new version string, or null for invalid input.
function inc (version, release) {
  version = exports.parse(version)
  if (!version) return null

  var parsedIndexLookup =
    { 'major': 1
    , 'minor': 2
    , 'patch': 3
    , 'build': 4 }
  var incIndex = parsedIndexLookup[release]
  if (incIndex === undefined) return null

  // num() returns -1 for an absent field; absent bumps to 1
  var current = num(version[incIndex])
  version[incIndex] = current === -1 ? 1 : current + 1

  // reset every field below the one we bumped (only those present)
  for (var i = incIndex + 1; i < 5; i ++) {
    if (num(version[i]) !== -1) version[i] = "0"
  }

  // the build slot, when present, is rendered with its leading "-"
  if (version[4]) version[4] = "-" + version[4]
  version[5] = ""

  return stringify(version)
}
+
diff --git a/deps/npm/node_modules/semver/test.js b/deps/npm/node_modules/semver/test.js
new file mode 100644
index 0000000000..c28fe39701
--- /dev/null
+++ b/deps/npm/node_modules/semver/test.js
@@ -0,0 +1,397 @@
+var tap = require("tap")
+ , test = tap.test
+ , semver = require("./semver.js")
+ , eq = semver.eq
+ , gt = semver.gt
+ , lt = semver.lt
+ , neq = semver.neq
+ , cmp = semver.cmp
+ , gte = semver.gte
+ , lte = semver.lte
+ , satisfies = semver.satisfies
+ , validRange = semver.validRange
+ , inc = semver.inc
+ , replaceStars = semver.replaceStars
+ , toComparators = semver.toComparators
+
+tap.plan(8)
+
+test("\ncomparison tests", function (t) {
+; [ ["0.0.0", "0.0.0foo"]
+ , ["0.0.1", "0.0.0"]
+ , ["1.0.0", "0.9.9"]
+ , ["0.10.0", "0.9.0"]
+ , ["0.99.0", "0.10.0"]
+ , ["2.0.0", "1.2.3"]
+ , ["v0.0.0", "0.0.0foo"]
+ , ["v0.0.1", "0.0.0"]
+ , ["v1.0.0", "0.9.9"]
+ , ["v0.10.0", "0.9.0"]
+ , ["v0.99.0", "0.10.0"]
+ , ["v2.0.0", "1.2.3"]
+ , ["0.0.0", "v0.0.0foo"]
+ , ["0.0.1", "v0.0.0"]
+ , ["1.0.0", "v0.9.9"]
+ , ["0.10.0", "v0.9.0"]
+ , ["0.99.0", "v0.10.0"]
+ , ["2.0.0", "v1.2.3"]
+ , ["1.2.3", "1.2.3-asdf"]
+ , ["1.2.3-4", "1.2.3"]
+ , ["1.2.3-4-foo", "1.2.3"]
+ , ["1.2.3-5", "1.2.3-5-foo"]
+ , ["1.2.3-5", "1.2.3-4"]
+ , ["1.2.3-5-foo", "1.2.3-5-Foo"]
+ ].forEach(function (v) {
+ var v0 = v[0]
+ , v1 = v[1]
+ t.ok(gt(v0, v1), "gt('"+v0+"', '"+v1+"')")
+ t.ok(lt(v1, v0), "lt('"+v1+"', '"+v0+"')")
+ t.ok(!gt(v1, v0), "!gt('"+v1+"', '"+v0+"')")
+ t.ok(!lt(v0, v1), "!lt('"+v0+"', '"+v1+"')")
+ t.ok(eq(v0, v0), "eq('"+v0+"', '"+v0+"')")
+ t.ok(eq(v1, v1), "eq('"+v1+"', '"+v1+"')")
+ t.ok(neq(v0, v1), "neq('"+v0+"', '"+v1+"')")
+ t.ok(cmp(v1, "==", v1), "cmp('"+v1+"' == '"+v1+"')")
+ t.ok(cmp(v0, ">=", v1), "cmp('"+v0+"' >= '"+v1+"')")
+ t.ok(cmp(v1, "<=", v0), "cmp('"+v1+"' <= '"+v0+"')")
+ t.ok(cmp(v0, "!=", v1), "cmp('"+v0+"' != '"+v1+"')")
+ })
+ t.end()
+})
+
+test("\nequality tests", function (t) {
+; [ ["1.2.3", "v1.2.3"]
+ , ["1.2.3", "=1.2.3"]
+ , ["1.2.3", "v 1.2.3"]
+ , ["1.2.3", "= 1.2.3"]
+ , ["1.2.3", " v1.2.3"]
+ , ["1.2.3", " =1.2.3"]
+ , ["1.2.3", " v 1.2.3"]
+ , ["1.2.3", " = 1.2.3"]
+ , ["1.2.3-0", "v1.2.3-0"]
+ , ["1.2.3-0", "=1.2.3-0"]
+ , ["1.2.3-0", "v 1.2.3-0"]
+ , ["1.2.3-0", "= 1.2.3-0"]
+ , ["1.2.3-0", " v1.2.3-0"]
+ , ["1.2.3-0", " =1.2.3-0"]
+ , ["1.2.3-0", " v 1.2.3-0"]
+ , ["1.2.3-0", " = 1.2.3-0"]
+ , ["1.2.3-01", "v1.2.3-1"]
+ , ["1.2.3-01", "=1.2.3-1"]
+ , ["1.2.3-01", "v 1.2.3-1"]
+ , ["1.2.3-01", "= 1.2.3-1"]
+ , ["1.2.3-01", " v1.2.3-1"]
+ , ["1.2.3-01", " =1.2.3-1"]
+ , ["1.2.3-01", " v 1.2.3-1"]
+ , ["1.2.3-01", " = 1.2.3-1"]
+ , ["1.2.3beta", "v1.2.3beta"]
+ , ["1.2.3beta", "=1.2.3beta"]
+ , ["1.2.3beta", "v 1.2.3beta"]
+ , ["1.2.3beta", "= 1.2.3beta"]
+ , ["1.2.3beta", " v1.2.3beta"]
+ , ["1.2.3beta", " =1.2.3beta"]
+ , ["1.2.3beta", " v 1.2.3beta"]
+ , ["1.2.3beta", " = 1.2.3beta"]
+ ].forEach(function (v) {
+ var v0 = v[0]
+ , v1 = v[1]
+ t.ok(eq(v0, v1), "eq('"+v0+"', '"+v1+"')")
+ t.ok(!neq(v0, v1), "!neq('"+v0+"', '"+v1+"')")
+ t.ok(cmp(v0, "==", v1), "cmp("+v0+"=="+v1+")")
+ t.ok(!cmp(v0, "!=", v1), "!cmp("+v0+"!="+v1+")")
+ t.ok(!cmp(v0, "===", v1), "!cmp("+v0+"==="+v1+")")
+ t.ok(cmp(v0, "!==", v1), "cmp("+v0+"!=="+v1+")")
+ t.ok(!gt(v0, v1), "!gt('"+v0+"', '"+v1+"')")
+ t.ok(gte(v0, v1), "gte('"+v0+"', '"+v1+"')")
+ t.ok(!lt(v0, v1), "!lt('"+v0+"', '"+v1+"')")
+ t.ok(lte(v0, v1), "lte('"+v0+"', '"+v1+"')")
+ })
+ t.end()
+})
+
+
+test("\nrange tests", function (t) {
+; [ ["1.0.0 - 2.0.0", "1.2.3"]
+ , ["1.0.0", "1.0.0"]
+ , [">=*", "0.2.4"]
+ , ["", "1.0.0"]
+ , ["*", "1.2.3"]
+ , ["*", "v1.2.3-foo"]
+ , [">=1.0.0", "1.0.0"]
+ , [">=1.0.0", "1.0.1"]
+ , [">=1.0.0", "1.1.0"]
+ , [">1.0.0", "1.0.1"]
+ , [">1.0.0", "1.1.0"]
+ , ["<=2.0.0", "2.0.0"]
+ , ["<=2.0.0", "1.9999.9999"]
+ , ["<=2.0.0", "0.2.9"]
+ , ["<2.0.0", "1.9999.9999"]
+ , ["<2.0.0", "0.2.9"]
+ , [">= 1.0.0", "1.0.0"]
+ , [">= 1.0.0", "1.0.1"]
+ , [">= 1.0.0", "1.1.0"]
+ , ["> 1.0.0", "1.0.1"]
+ , ["> 1.0.0", "1.1.0"]
+ , ["<= 2.0.0", "2.0.0"]
+ , ["<= 2.0.0", "1.9999.9999"]
+ , ["<= 2.0.0", "0.2.9"]
+ , ["< 2.0.0", "1.9999.9999"]
+ , ["<\t2.0.0", "0.2.9"]
+ , [">=0.1.97", "v0.1.97"]
+ , [">=0.1.97", "0.1.97"]
+ , ["0.1.20 || 1.2.4", "1.2.4"]
+ , [">=0.2.3 || <0.0.1", "0.0.0"]
+ , [">=0.2.3 || <0.0.1", "0.2.3"]
+ , [">=0.2.3 || <0.0.1", "0.2.4"]
+ , ["||", "1.3.4"]
+ , ["2.x.x", "2.1.3"]
+ , ["1.2.x", "1.2.3"]
+ , ["1.2.x || 2.x", "2.1.3"]
+ , ["1.2.x || 2.x", "1.2.3"]
+ , ["x", "1.2.3"]
+ , ["2.*.*", "2.1.3"]
+ , ["1.2.*", "1.2.3"]
+ , ["1.2.* || 2.*", "2.1.3"]
+ , ["1.2.* || 2.*", "1.2.3"]
+ , ["*", "1.2.3"]
+ , ["2", "2.1.2"]
+ , ["2.3", "2.3.1"]
+ , ["~2.4", "2.4.0"] // >=2.4.0 <2.5.0
+ , ["~2.4", "2.4.5"]
+ , ["~>3.2.1", "3.2.2"] // >=3.2.1 <3.3.0
+ , ["~1", "1.2.3"] // >=1.0.0 <2.0.0
+ , ["~>1", "1.2.3"]
+ , ["~> 1", "1.2.3"]
+ , ["~1.0", "1.0.2"] // >=1.0.0 <1.1.0
+ , ["~ 1.0", "1.0.2"]
+ , [">=1", "1.0.0"]
+ , [">= 1", "1.0.0"]
+ , ["<1.2", "1.1.1"]
+ , ["< 1.2", "1.1.1"]
+ , ["1", "1.0.0beta"]
+ , ["~v0.5.4-pre", "0.5.5"]
+ , ["~v0.5.4-pre", "0.5.4"]
+ ].forEach(function (v) {
+ t.ok(satisfies(v[1], v[0]), v[0]+" satisfied by "+v[1])
+ })
+ t.end()
+})
+
+test("\nnegative range tests", function (t) {
+; [ ["1.0.0 - 2.0.0", "2.2.3"]
+ , ["1.0.0", "1.0.1"]
+ , [">=1.0.0", "0.0.0"]
+ , [">=1.0.0", "0.0.1"]
+ , [">=1.0.0", "0.1.0"]
+ , [">1.0.0", "0.0.1"]
+ , [">1.0.0", "0.1.0"]
+ , ["<=2.0.0", "3.0.0"]
+ , ["<=2.0.0", "2.9999.9999"]
+ , ["<=2.0.0", "2.2.9"]
+ , ["<2.0.0", "2.9999.9999"]
+ , ["<2.0.0", "2.2.9"]
+ , [">=0.1.97", "v0.1.93"]
+ , [">=0.1.97", "0.1.93"]
+ , ["0.1.20 || 1.2.4", "1.2.3"]
+ , [">=0.2.3 || <0.0.1", "0.0.3"]
+ , [">=0.2.3 || <0.0.1", "0.2.2"]
+ , ["2.x.x", "1.1.3"]
+ , ["2.x.x", "3.1.3"]
+ , ["1.2.x", "1.3.3"]
+ , ["1.2.x || 2.x", "3.1.3"]
+ , ["1.2.x || 2.x", "1.1.3"]
+ , ["2.*.*", "1.1.3"]
+ , ["2.*.*", "3.1.3"]
+ , ["1.2.*", "1.3.3"]
+ , ["1.2.* || 2.*", "3.1.3"]
+ , ["1.2.* || 2.*", "1.1.3"]
+ , ["2", "1.1.2"]
+ , ["2.3", "2.4.1"]
+ , ["~2.4", "2.5.0"] // >=2.4.0 <2.5.0
+ , ["~2.4", "2.3.9"]
+ , ["~>3.2.1", "3.3.2"] // >=3.2.1 <3.3.0
+ , ["~>3.2.1", "3.2.0"] // >=3.2.1 <3.3.0
+ , ["~1", "0.2.3"] // >=1.0.0 <2.0.0
+ , ["~>1", "2.2.3"]
+ , ["~1.0", "1.1.0"] // >=1.0.0 <1.1.0
+ , ["<1", "1.0.0"]
+ , [">=1.2", "1.1.1"]
+ , ["1", "2.0.0beta"]
+ , ["~v0.5.4-beta", "0.5.4-alpha"]
+ , ["<1", "1.0.0beta"]
+ , ["< 1", "1.0.0beta"]
+ ].forEach(function (v) {
+ t.ok(!satisfies(v[1], v[0]), v[0]+" not satisfied by "+v[1])
+ })
+ t.end()
+})
+
+test("\nincrement versions test", function (t) {
+; [ [ "1.2.3", "major", "2.0.0" ]
+ , [ "1.2.3", "minor", "1.3.0" ]
+ , [ "1.2.3", "patch", "1.2.4" ]
+ , [ "1.2.3", "build", "1.2.3-1" ]
+ , [ "1.2.3-4", "build", "1.2.3-5" ]
+ , [ "1.2.3tag", "major", "2.0.0" ]
+ , [ "1.2.3-tag", "major", "2.0.0" ]
+ , [ "1.2.3tag", "build", "1.2.3-1" ]
+ , [ "1.2.3-tag", "build", "1.2.3-1" ]
+ , [ "1.2.3-4-tag", "build", "1.2.3-5" ]
+ , [ "1.2.3-4tag", "build", "1.2.3-5" ]
+ , [ "1.2.3", "fake", null ]
+ , [ "fake", "major", null ]
+ ].forEach(function (v) {
+ t.equal(inc(v[0], v[1]), v[2], "inc("+v[0]+", "+v[1]+") === "+v[2])
+ })
+
+ t.end()
+})
+
+test("\nreplace stars test", function (t) {
+; [ [ "", "" ]
+ , [ "*", "" ]
+ , [ "> *", "" ]
+ , [ "<*", "" ]
+ , [ " >= *", "" ]
+ , [ "* || 1.2.3", " || 1.2.3" ]
+ ].forEach(function (v) {
+ t.equal(replaceStars(v[0]), v[1], "replaceStars("+v[0]+") === "+v[1])
+ })
+
+ t.end()
+})
+
+test("\nvalid range test", function (t) {
+; [ ["1.0.0 - 2.0.0", ">=1.0.0 <=2.0.0"]
+ , ["1.0.0", "1.0.0"]
+ , [">=*", ""]
+ , ["", ""]
+ , ["*", ""]
+ , ["*", ""]
+ , [">=1.0.0", ">=1.0.0"]
+ , [">1.0.0", ">1.0.0"]
+ , ["<=2.0.0", "<=2.0.0"]
+ , ["1", ">=1.0.0- <2.0.0-"]
+ , ["<=2.0.0", "<=2.0.0"]
+ , ["<=2.0.0", "<=2.0.0"]
+ , ["<2.0.0", "<2.0.0"]
+ , ["<2.0.0", "<2.0.0"]
+ , [">= 1.0.0", ">=1.0.0"]
+ , [">= 1.0.0", ">=1.0.0"]
+ , [">= 1.0.0", ">=1.0.0"]
+ , ["> 1.0.0", ">1.0.0"]
+ , ["> 1.0.0", ">1.0.0"]
+ , ["<= 2.0.0", "<=2.0.0"]
+ , ["<= 2.0.0", "<=2.0.0"]
+ , ["<= 2.0.0", "<=2.0.0"]
+ , ["< 2.0.0", "<2.0.0"]
+ , ["< 2.0.0", "<2.0.0"]
+ , [">=0.1.97", ">=0.1.97"]
+ , [">=0.1.97", ">=0.1.97"]
+ , ["0.1.20 || 1.2.4", "0.1.20||1.2.4"]
+ , [">=0.2.3 || <0.0.1", ">=0.2.3||<0.0.1"]
+ , [">=0.2.3 || <0.0.1", ">=0.2.3||<0.0.1"]
+ , [">=0.2.3 || <0.0.1", ">=0.2.3||<0.0.1"]
+ , ["||", "||"]
+ , ["2.x.x", ">=2.0.0- <3.0.0-"]
+ , ["1.2.x", ">=1.2.0- <1.3.0-"]
+ , ["1.2.x || 2.x", ">=1.2.0- <1.3.0-||>=2.0.0- <3.0.0-"]
+ , ["1.2.x || 2.x", ">=1.2.0- <1.3.0-||>=2.0.0- <3.0.0-"]
+ , ["x", ""]
+ , ["2.*.*", null]
+ , ["1.2.*", null]
+ , ["1.2.* || 2.*", null]
+ , ["1.2.* || 2.*", null]
+ , ["*", ""]
+ , ["2", ">=2.0.0- <3.0.0-"]
+ , ["2.3", ">=2.3.0- <2.4.0-"]
+ , ["~2.4", ">=2.4.0- <2.5.0-"]
+ , ["~2.4", ">=2.4.0- <2.5.0-"]
+ , ["~>3.2.1", ">=3.2.1- <3.3.0-"]
+ , ["~1", ">=1.0.0- <2.0.0-"]
+ , ["~>1", ">=1.0.0- <2.0.0-"]
+ , ["~> 1", ">=1.0.0- <2.0.0-"]
+ , ["~1.0", ">=1.0.0- <1.1.0-"]
+ , ["~ 1.0", ">=1.0.0- <1.1.0-"]
+ , ["<1", "<1.0.0-"]
+ , ["< 1", "<1.0.0-"]
+ , [">=1", ">=1.0.0-"]
+ , [">= 1", ">=1.0.0-"]
+ , ["<1.2", "<1.2.0-"]
+ , ["< 1.2", "<1.2.0-"]
+ , ["1", ">=1.0.0- <2.0.0-"]
+ ].forEach(function (v) {
+ t.equal(validRange(v[0]), v[1], "validRange("+v[0]+") === "+v[1])
+ })
+
+ t.end()
+})
+
+test("\ncomparators test", function (t) {
+; [ ["1.0.0 - 2.0.0", [[">=1.0.0", "<=2.0.0"]] ]
+ , ["1.0.0", [["1.0.0"]] ]
+ , [">=*", [[">=0.0.0-"]] ]
+ , ["", [[""]]]
+ , ["*", [[""]] ]
+ , ["*", [[""]] ]
+ , [">=1.0.0", [[">=1.0.0"]] ]
+ , [">=1.0.0", [[">=1.0.0"]] ]
+ , [">=1.0.0", [[">=1.0.0"]] ]
+ , [">1.0.0", [[">1.0.0"]] ]
+ , [">1.0.0", [[">1.0.0"]] ]
+ , ["<=2.0.0", [["<=2.0.0"]] ]
+ , ["1", [[">=1.0.0-", "<2.0.0-"]] ]
+ , ["<=2.0.0", [["<=2.0.0"]] ]
+ , ["<=2.0.0", [["<=2.0.0"]] ]
+ , ["<2.0.0", [["<2.0.0"]] ]
+ , ["<2.0.0", [["<2.0.0"]] ]
+ , [">= 1.0.0", [[">=1.0.0"]] ]
+ , [">= 1.0.0", [[">=1.0.0"]] ]
+ , [">= 1.0.0", [[">=1.0.0"]] ]
+ , ["> 1.0.0", [[">1.0.0"]] ]
+ , ["> 1.0.0", [[">1.0.0"]] ]
+ , ["<= 2.0.0", [["<=2.0.0"]] ]
+ , ["<= 2.0.0", [["<=2.0.0"]] ]
+ , ["<= 2.0.0", [["<=2.0.0"]] ]
+ , ["< 2.0.0", [["<2.0.0"]] ]
+ , ["<\t2.0.0", [["<2.0.0"]] ]
+ , [">=0.1.97", [[">=0.1.97"]] ]
+ , [">=0.1.97", [[">=0.1.97"]] ]
+ , ["0.1.20 || 1.2.4", [["0.1.20"], ["1.2.4"]] ]
+ , [">=0.2.3 || <0.0.1", [[">=0.2.3"], ["<0.0.1"]] ]
+ , [">=0.2.3 || <0.0.1", [[">=0.2.3"], ["<0.0.1"]] ]
+ , [">=0.2.3 || <0.0.1", [[">=0.2.3"], ["<0.0.1"]] ]
+ , ["||", [[""], [""]] ]
+ , ["2.x.x", [[">=2.0.0-", "<3.0.0-"]] ]
+ , ["1.2.x", [[">=1.2.0-", "<1.3.0-"]] ]
+ , ["1.2.x || 2.x", [[">=1.2.0-", "<1.3.0-"], [">=2.0.0-", "<3.0.0-"]] ]
+ , ["1.2.x || 2.x", [[">=1.2.0-", "<1.3.0-"], [">=2.0.0-", "<3.0.0-"]] ]
+ , ["x", [[""]] ]
+ , ["2.*.*", [[">=2.0.0-", "<3.0.0-"]] ]
+ , ["1.2.*", [[">=1.2.0-", "<1.3.0-"]] ]
+ , ["1.2.* || 2.*", [[">=1.2.0-", "<1.3.0-"], [">=2.0.0-", "<3.0.0-"]] ]
+ , ["1.2.* || 2.*", [[">=1.2.0-", "<1.3.0-"], [">=2.0.0-", "<3.0.0-"]] ]
+ , ["*", [[""]] ]
+ , ["2", [[">=2.0.0-", "<3.0.0-"]] ]
+ , ["2.3", [[">=2.3.0-", "<2.4.0-"]] ]
+ , ["~2.4", [[">=2.4.0-", "<2.5.0-"]] ]
+ , ["~2.4", [[">=2.4.0-", "<2.5.0-"]] ]
+ , ["~>3.2.1", [[">=3.2.1-", "<3.3.0-"]] ]
+ , ["~1", [[">=1.0.0-", "<2.0.0-"]] ]
+ , ["~>1", [[">=1.0.0-", "<2.0.0-"]] ]
+ , ["~> 1", [[">=1.0.0-", "<2.0.0-"]] ]
+ , ["~1.0", [[">=1.0.0-", "<1.1.0-"]] ]
+ , ["~ 1.0", [[">=1.0.0-", "<1.1.0-"]] ]
+ , ["<1", [["<1.0.0-"]] ]
+ , ["< 1", [["<1.0.0-"]] ]
+ , [">=1", [[">=1.0.0-"]] ]
+ , [">= 1", [[">=1.0.0-"]] ]
+ , ["<1.2", [["<1.2.0-"]] ]
+ , ["< 1.2", [["<1.2.0-"]] ]
+ , ["1", [[">=1.0.0-", "<2.0.0-"]] ]
+ ].forEach(function (v) {
+ t.equivalent(toComparators(v[0]), v[1], "toComparators("+v[0]+") === "+JSON.stringify(v[1]))
+ })
+
+ t.end()
+})
diff --git a/deps/npm/node_modules/slide/.npmignore b/deps/npm/node_modules/slide/.npmignore
new file mode 100644
index 0000000000..a136337994
--- /dev/null
+++ b/deps/npm/node_modules/slide/.npmignore
@@ -0,0 +1 @@
+*.pdf
diff --git a/deps/npm/node_modules/slide/LICENSE b/deps/npm/node_modules/slide/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/slide/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/slide/README.md b/deps/npm/node_modules/slide/README.md
new file mode 100644
index 0000000000..6e4be2f94e
--- /dev/null
+++ b/deps/npm/node_modules/slide/README.md
@@ -0,0 +1,32 @@
+# Slide - a tiny flow control library
+
+Callbacks are simple and easy if you keep the pattern consistent.
+
+Check out the [slide
+presentation](http://github.com/isaacs/slide-flow-control/raw/master/nodejs-controlling-flow.pdf),
+or the [blog post](http://howto.no.de/flow-control-in-npm).
+
+You'll laugh when you see how little code is actually in this thing.
+It's so not-enterprisey, you won't believe it. It does almost nothing,
+but it's super handy.
+
+I use this util in [a real world program](http://npmjs.org/).
+
+You should use it as an example of how to write your own flow control
+utilities. You'll never fully appreciate a flow control lib that you
+didn't write yourself.
+
+## Installation
+
+Just copy the files into your project, and use them that way, or
+you can do this:
+
+ npm install slide
+
+and then:
+
+ var asyncMap = require("slide").asyncMap
+ , chain = require("slide").chain
+ // use the power!
+
+Enjoy!
diff --git a/deps/npm/node_modules/slide/index.js b/deps/npm/node_modules/slide/index.js
new file mode 100644
index 0000000000..0a9277f6ee
--- /dev/null
+++ b/deps/npm/node_modules/slide/index.js
@@ -0,0 +1 @@
+module.exports=require("./lib/slide")
diff --git a/deps/npm/node_modules/slide/lib/async-map-ordered.js b/deps/npm/node_modules/slide/lib/async-map-ordered.js
new file mode 100644
index 0000000000..5cca79a82c
--- /dev/null
+++ b/deps/npm/node_modules/slide/lib/async-map-ordered.js
@@ -0,0 +1,65 @@
+
+throw new Error("TODO: Not yet implemented.")
+
+/*
+usage:
+
+Like asyncMap, but only can take a single cb, and guarantees
+the order of the results.
+*/
+
+module.exports = asyncMapOrdered
+
+function asyncMapOrdered (list, fn, cb_) {
+ if (typeof cb_ !== "function") throw new Error(
+ "No callback provided to asyncMapOrdered")
+
+ if (typeof fn !== "function") throw new Error(
+ "No map function provided to asyncMapOrdered")
+
+ if (list === undefined || list === null) return cb_(null, [])
+ if (!Array.isArray(list)) list = [list]
+ if (!list.length) return cb_(null, [])
+
+ var errState = null
+ , l = list.length
+ , a = l
+ , res = []
+ , resCount = 0
+ , maxArgLen = 0
+
+ function cb (index) { return function () {
+ if (errState) return
+ var er = arguments[0]
+ var argLen = arguments.length
+ maxArgLen = Math.max(maxArgLen, argLen)
+ res[index] = argLen === 1 ? [er] : Array.apply(null, arguments)
+
+ // see if any new things have been added.
+ if (list.length > l) {
+ var newList = list.slice(l)
+ a += (list.length - l)
+ var oldLen = l
+ l = list.length
+ process.nextTick(function () {
+ newList.forEach(function (ar, i) { fn(ar, cb(i + oldLen)) })
+ })
+ }
+
+ if (er || --a === 0) {
+ errState = er
+ cb_.apply(null, [errState].concat(flip(res, resCount, maxArgLen)))
+ }
+ }}
+ // expect the supplied cb function to be called
+ // "n" times for each thing in the array.
+ list.forEach(function (ar) {
+ steps.forEach(function (fn, i) { fn(ar, cb(i)) })
+ })
+}
+
+function flip (res, resCount, argLen) {
+ var flat = []
+ // res = [[er, x, y], [er, x1, y1], [er, x2, y2, z2]]
+ // return [[x, x1, x2], [y, y1, y2], [undefined, undefined, z2]]
+
diff --git a/deps/npm/node_modules/slide/lib/async-map.js b/deps/npm/node_modules/slide/lib/async-map.js
new file mode 100644
index 0000000000..1ced158e0e
--- /dev/null
+++ b/deps/npm/node_modules/slide/lib/async-map.js
@@ -0,0 +1,56 @@
+
+/*
+usage:
+
+// do something to a list of things
+asyncMap(myListOfStuff, function (thing, cb) { doSomething(thing.foo, cb) }, cb)
+// do more than one thing to each item
+asyncMap(list, fooFn, barFn, cb)
+
+*/
+
+module.exports = asyncMap
+
+function asyncMap () {
+ var steps = Array.prototype.slice.call(arguments)
+ , list = steps.shift() || []
+ , cb_ = steps.pop()
+ if (typeof cb_ !== "function") throw new Error(
+ "No callback provided to asyncMap")
+ if (!list) return cb_(null, [])
+ if (!Array.isArray(list)) list = [list]
+ var n = steps.length
+ , data = [] // 2d array
+ , errState = null
+ , l = list.length
+ , a = l * n
+ if (!a) return cb_(null, [])
+ function cb (er) {
+ if (errState) return
+ var argLen = arguments.length
+ for (var i = 1; i < argLen; i ++) if (arguments[i] !== undefined) {
+ data[i - 1] = (data[i - 1] || []).concat(arguments[i])
+ }
+ // see if any new things have been added.
+ if (list.length > l) {
+ var newList = list.slice(l)
+ a += (list.length - l) * n
+ l = list.length
+ process.nextTick(function () {
+ newList.forEach(function (ar) {
+ steps.forEach(function (fn) { fn(ar, cb) })
+ })
+ })
+ }
+
+ if (er || --a === 0) {
+ errState = er
+ cb_.apply(null, [errState].concat(data))
+ }
+ }
+ // expect the supplied cb function to be called
+ // "n" times for each thing in the array.
+ list.forEach(function (ar) {
+ steps.forEach(function (fn) { fn(ar, cb) })
+ })
+}
diff --git a/deps/npm/node_modules/slide/lib/bind-actor.js b/deps/npm/node_modules/slide/lib/bind-actor.js
new file mode 100644
index 0000000000..6a37072749
--- /dev/null
+++ b/deps/npm/node_modules/slide/lib/bind-actor.js
@@ -0,0 +1,16 @@
+module.exports = bindActor
+function bindActor () {
+ var args =
+ Array.prototype.slice.call
+ (arguments) // jswtf.
+ , obj = null
+ , fn
+ if (typeof args[0] === "object") {
+ obj = args.shift()
+ fn = args.shift()
+ if (typeof fn === "string")
+ fn = obj[ fn ]
+ } else fn = args.shift()
+ return function (cb) {
+ fn.apply(obj, args.concat(cb)) }
+}
diff --git a/deps/npm/node_modules/slide/lib/chain.js b/deps/npm/node_modules/slide/lib/chain.js
new file mode 100644
index 0000000000..17b3711494
--- /dev/null
+++ b/deps/npm/node_modules/slide/lib/chain.js
@@ -0,0 +1,20 @@
+module.exports = chain
+var bindActor = require("./bind-actor.js")
+chain.first = {} ; chain.last = {}
+function chain (things, cb) {
+ var res = []
+ ;(function LOOP (i, len) {
+ if (i >= len) return cb(null,res)
+ if (Array.isArray(things[i]))
+ things[i] = bindActor.apply(null,
+ things[i].map(function(i){
+ return (i===chain.first) ? res[0]
+ : (i===chain.last)
+ ? res[res.length - 1] : i }))
+ if (!things[i]) return LOOP(i + 1, len)
+ things[i](function (er, data) {
+ if (er) return cb(er, res)
+ if (data !== undefined) res = res.concat(data)
+ LOOP(i + 1, len)
+ })
+ })(0, things.length) }
diff --git a/deps/npm/node_modules/slide/lib/slide.js b/deps/npm/node_modules/slide/lib/slide.js
new file mode 100644
index 0000000000..6e9ec2327a
--- /dev/null
+++ b/deps/npm/node_modules/slide/lib/slide.js
@@ -0,0 +1,3 @@
+exports.asyncMap = require("./async-map")
+exports.bindActor = require("./bind-actor")
+exports.chain = require("./chain")
diff --git a/deps/npm/node_modules/slide/package.json b/deps/npm/node_modules/slide/package.json
new file mode 100644
index 0000000000..5cc2689e63
--- /dev/null
+++ b/deps/npm/node_modules/slide/package.json
@@ -0,0 +1,19 @@
+{
+ "name": "slide",
+ "version": "1.1.3",
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "contributors": [
+ "S. Sriram <ssriram@gmail.com> (http://www.565labs.com)"
+ ],
+ "description": "A flow control lib small enough to fit on in a slide presentation. Derived live at Oak.JS",
+ "main": "./lib/slide.js",
+ "dependencies": {},
+ "devDependencies": {},
+ "engines": {
+ "node": "*"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/slide-flow-control.git"
+ }
+}
diff --git a/deps/npm/node_modules/tar/.gitignore b/deps/npm/node_modules/tar/.gitignore
new file mode 100644
index 0000000000..f96c7db109
--- /dev/null
+++ b/deps/npm/node_modules/tar/.gitignore
@@ -0,0 +1,4 @@
+.*.swp
+node_modules
+examples/extract/
+test/tmp/
diff --git a/deps/npm/node_modules/tar/README.md b/deps/npm/node_modules/tar/README.md
new file mode 100644
index 0000000000..7cfe3bbca5
--- /dev/null
+++ b/deps/npm/node_modules/tar/README.md
@@ -0,0 +1,50 @@
+# node-tar
+
+Tar for Node.js.
+
+## Goals of this project
+
+1. Be able to parse and reasonably extract the contents of any tar file
+ created by any program that creates tar files, period.
+
+ At least, this includes every version of:
+
+ * bsdtar
+ * gnutar
+ * solaris posix tar
+ * Joerg Schilling's star ("Schilly tar")
+
+2. Create tar files that can be extracted by any of the following tar
+ programs:
+
+ * bsdtar/libarchive version 2.6.2
+ * gnutar 1.15 and above
+ * SunOS Posix tar
+ * Joerg Schilling's star ("Schilly tar")
+
+3. 100% test coverage. Speed is important. Correctness is slightly
+ more important.
+
+4. Create the kind of tar interface that Node users would want to use.
+
+5. Satisfy npm's needs for a portable tar implementation with a
+ JavaScript interface.
+
+6. No excuses. No complaining. No tolerance for failure.
+
+## But isn't there already a tar.js?
+
+Yes, there are a few. This one is going to be better, and it will be
+fanatically maintained, because npm will depend on it.
+
+That's why I need to write it from scratch. Creating and extracting
+tarballs is such a large part of what npm does, I simply can't have it
+be a black box any longer.
+
+## Didn't you have something already? Where'd it go?
+
+It's in the "old" folder. It's not functional. Don't use it.
+
+It was a useful exploration to learn the issues involved, but like most
+software of any reasonable complexity, node-tar won't be useful until
+it's been written at least 3 times.
diff --git a/deps/npm/node_modules/tar/examples/extracter.js b/deps/npm/node_modules/tar/examples/extracter.js
new file mode 100644
index 0000000000..e150abf25d
--- /dev/null
+++ b/deps/npm/node_modules/tar/examples/extracter.js
@@ -0,0 +1,11 @@
+var tar = require("../tar.js")
+ , fs = require("fs")
+
+fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
+ .pipe(tar.Extract({ path: __dirname + "/extract" }))
+ .on("error", function (er) {
+ console.error("error here")
+ })
+ .on("end", function () {
+ console.error("done")
+ })
diff --git a/deps/npm/node_modules/tar/examples/reader.js b/deps/npm/node_modules/tar/examples/reader.js
new file mode 100644
index 0000000000..c2584d398f
--- /dev/null
+++ b/deps/npm/node_modules/tar/examples/reader.js
@@ -0,0 +1,36 @@
+var tar = require("../tar.js")
+ , fs = require("fs")
+
+fs.createReadStream(__dirname + "/../test/fixtures/c.tar")
+ .pipe(tar.Reader())
+ .on("extendedHeader", function (e) {
+ console.error("extended pax header", e.props)
+ e.on("end", function () {
+ console.error("extended pax fields:", e.fields)
+ })
+ })
+ .on("ignoredEntry", function (e) {
+ console.error("ignoredEntry?!?", e.props)
+ })
+ .on("longLinkpath", function (e) {
+ console.error("longLinkpath entry", e.props)
+ e.on("end", function () {
+ console.error("value=%j", e.body.toString())
+ })
+ })
+ .on("longPath", function (e) {
+ console.error("longPath entry", e.props)
+ e.on("end", function () {
+ console.error("value=%j", e.body.toString())
+ })
+ })
+ .on("entry", function (e) {
+ console.error("entry", e.props)
+ e.on("data", function (c) {
+ console.error(" >>>" + c.toString().replace(/\n/g, "\\n"))
+ })
+ e.on("end", function () {
+ console.error(" <<<EOF")
+ })
+ })
+
diff --git a/deps/npm/node_modules/tar/lib/buffer-entry.js b/deps/npm/node_modules/tar/lib/buffer-entry.js
new file mode 100644
index 0000000000..5322a28abf
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/buffer-entry.js
@@ -0,0 +1,30 @@
+// just like the Entry class, but it buffers the contents
+//
+// XXX It would be good to set a maximum BufferEntry filesize,
+// since it eats up memory. In normal operation,
+// these are only for long filenames or link names, which are
+// rarely very big.
+
+module.exports = BufferEntry
+
+var inherits = require("inherits")
+ , Entry = require("./entry.js")
+
+function BufferEntry () {
+ Entry.apply(this, arguments)
+ this._buffer = new Buffer(this.props.size)
+ this._offset = 0
+ this.body = ""
+ this.on("end", function () {
+ this.body = this._buffer.toString().slice(0, -1)
+ })
+}
+
+// collect the bytes as they come in.
+BufferEntry.prototype.write = function (c) {
+ c.copy(this._buffer, this._offset)
+ this._offset += c.length
+ Entry.prototype.write.call(this, c)
+}
+
+inherits(BufferEntry, Entry)
diff --git a/deps/npm/node_modules/tar/lib/entry-writer.js b/deps/npm/node_modules/tar/lib/entry-writer.js
new file mode 100644
index 0000000000..b8a87ea131
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/entry-writer.js
@@ -0,0 +1,166 @@
+module.exports = EntryWriter
+
+var tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , inherits = require("inherits")
+ , BlockStream = require("block-stream")
+ , ExtendedHeaderWriter
+ , Stream = require("stream").Stream
+ , EOF = {}
+
+inherits(EntryWriter, Stream)
+
+function EntryWriter (props) {
+ var me = this
+
+ if (!(me instanceof EntryWriter)) {
+ return new EntryWriter(props)
+ }
+
+ Stream.apply(this)
+
+ me.writable = true
+ me.readable = true
+
+ me._stream = new BlockStream(512)
+
+ me._stream.on("data", function (c) {
+ me.emit("data", c)
+ })
+
+ me._stream.on("drain", function () {
+ me.emit("drain")
+ })
+
+ me._stream.on("end", function () {
+ me.emit("end")
+ me.emit("close")
+ })
+
+ me.props = props
+ if (props.type === "Directory") {
+ props.size = 0
+ }
+ props.ustar = "ustar\0"
+ props.ustarver = "00"
+ me.path = props.path
+
+ me._buffer = []
+ me._didHeader = false
+ me._meta = false
+
+ me.on("pipe", function () {
+ me._process()
+ })
+}
+
+EntryWriter.prototype.write = function (c) {
+ // console.error(".. ew write")
+ if (this._ended) return this.emit("error", new Error("write after end"))
+ this._buffer.push(c)
+ this._process()
+ this._needDrain = this._buffer.length > 0
+ return !this._needDrain
+}
+
+EntryWriter.prototype.end = function (c) {
+ // console.error(".. ew end")
+ if (c) this._buffer.push(c)
+ this._buffer.push(EOF)
+ this._ended = true
+ this._process()
+ this._needDrain = this._buffer.length > 0
+}
+
+EntryWriter.prototype.pause = function () {
+ // console.error(".. ew pause")
+ this._paused = true
+ this.emit("pause")
+}
+
+EntryWriter.prototype.resume = function () {
+ // console.error(".. ew resume")
+ this._paused = false
+ this.emit("resume")
+ this._process()
+}
+
+EntryWriter.prototype.add = function (entry) {
+ // console.error(".. ew add")
+ if (!this.parent) return this.emit("error", new Error("no parent"))
+
+ // make sure that the _header and such is emitted, and clear out
+ // the _currentEntry link on the parent.
+ if (!this._ended) this.end()
+
+ return this.parent.add(entry)
+}
+
+EntryWriter.prototype._header = function () {
+ // console.error(".. ew header")
+ if (this._didHeader) return
+ this._didHeader = true
+
+ var headerBlock = TarHeader.encode(this.props)
+
+ if (this.props.needExtended && !this._meta) {
+ var me = this
+
+ ExtendedHeaderWriter = ExtendedHeaderWriter ||
+ require("./extended-header-writer.js")
+
+ ExtendedHeaderWriter(this.props)
+ .on("data", function (c) {
+ me.emit("data", c)
+ })
+ .end()
+ }
+
+ // console.error(".. .. ew headerBlock emitting")
+ this.emit("data", headerBlock)
+ this.emit("header")
+}
+
+EntryWriter.prototype._process = function () {
+ // console.error(".. .. ew process")
+ if (!this._didHeader && !this._meta) {
+ this._header()
+ }
+
+ if (this._paused || this._processing) {
+ // console.error(".. .. .. paused=%j, processing=%j", this._paused, this._processing)
+ return
+ }
+
+ this._processing = true
+
+ var buf = this._buffer
+ for (var i = 0; i < buf.length; i ++) {
+ // console.error(".. .. .. i=%d", i)
+
+ var c = buf[i]
+
+ if (c === EOF) this._stream.end()
+ else this._stream.write(c)
+
+ if (this._paused) {
+ // console.error(".. .. .. paused mid-emission")
+ this._processing = false
+ if (i < buf.length) {
+ this._needDrain = true
+ this._buffer = buf.slice(i + 1)
+ }
+ return
+ }
+ }
+
+ // console.error(".. .. .. emitted")
+ this._buffer.length = 0
+ this._processing = false
+
+ // console.error(".. .. .. emitting drain")
+ this.emit("drain")
+}
+
+EntryWriter.prototype.destroy = function () {}
diff --git a/deps/npm/node_modules/tar/lib/entry.js b/deps/npm/node_modules/tar/lib/entry.js
new file mode 100644
index 0000000000..4fc331eb94
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/entry.js
@@ -0,0 +1,212 @@
+// A passthrough read/write stream that sets its properties
+// based on a header, extendedHeader, and globalHeader
+//
+// Can be either a file system object of some sort, or
+// a pax/ustar metadata entry.
+
+module.exports = Entry
+
+var TarHeader = require("./header.js")
+ , tar = require("../tar")
+ , assert = require("assert").ok
+ , Stream = require("stream").Stream
+ , inherits = require("inherits")
+ , fstream = require("fstream").Abstract
+
+function Entry (header, extended, global) {
+ Stream.call(this)
+ this.readable = true
+ this.writable = true
+
+ this._needDrain = false
+ this._paused = false
+ this._reading = false
+ this._ending = false
+ this._ended = false
+ this._remaining = 0
+ this._queue = []
+ this._index = 0
+ this._queueLen = 0
+
+ this._read = this._read.bind(this)
+
+ this.props = {}
+ this._header = header
+ this._extended = extended || {}
+
+ // globals can change throughout the course of
+ // a file parse operation. Freeze it at its current state.
+ this._global = {}
+ var me = this
+ Object.keys(global || {}).forEach(function (g) {
+ me._global[g] = global[g]
+ })
+
+ this._setProps()
+}
+
+inherits(Entry, Stream,
+{ write: function (c) {
+ if (this._ending) this.error("write() after end()", null, true)
+ if (this._remaining === 0) {
+ this.error("invalid bytes past eof")
+ }
+
+ // often we'll get a bunch of \0 at the end of the last write,
+ // since chunks will always be 512 bytes when reading a tarball.
+ if (c.length > this._remaining) {
+ c = c.slice(0, this._remaining)
+ }
+ this._remaining -= c.length
+
+ // put it on the stack.
+ var ql = this._queueLen
+ this._queue.push(c)
+ this._queueLen ++
+
+ this._read()
+
+ // either paused, or buffered
+ if (this._paused || ql > 0) {
+ this._needDrain = true
+ return false
+ }
+
+ return true
+ }
+
+, end: function (c) {
+ if (c) this.write(c)
+ this._ending = true
+ this._read()
+ }
+
+, pause: function () {
+ this._paused = true
+ this.emit("pause")
+ }
+
+, resume: function () {
+ // console.error(" Tar Entry resume", this.path)
+ this.emit("resume")
+ this._paused = false
+ this._read()
+ return this._queueLen - this._index > 1
+ }
+
+ // This is bound to the instance
+, _read: function () {
+ // console.error(" Tar Entry _read", this.path)
+
+ if (this._paused || this._reading || this._ended) return
+
+ // set this flag so that event handlers don't inadvertently
+ // get multiple _read() calls running.
+ this._reading = true
+
+ // have any data to emit?
+ if (this._index < this._queueLen) {
+ var chunk = this._queue[this._index ++]
+ this.emit("data", chunk)
+ }
+
+ // check if we're drained
+ if (this._index >= this._queueLen) {
+ this._queue.length = this._queueLen = this._index = 0
+ if (this._needDrain) {
+ this._needDrain = false
+ this.emit("drain")
+ }
+ if (this._ending) {
+ this._ended = true
+ this.emit("end")
+ }
+ }
+
+ // if the queue gets too big, then pluck off whatever we can.
+ // this should be fairly rare.
+ var mql = this._maxQueueLen
+ if (this._queueLen > mql && this._index > 0) {
+ mql = Math.min(this._index, mql)
+ this._index -= mql
+ this._queueLen -= mql
+ this._queue = this._queue.slice(mql)
+ }
+
+ this._reading = false
+ }
+
+, _setProps: function () {
+ // props = extended->global->header->{}
+ var header = this._header
+ , extended = this._extended
+ , global = this._global
+ , props = this.props
+
+ // first get the values from the normal header.
+ var fields = tar.fields
+ for (var f = 0; fields[f] !== null; f ++) {
+ var field = fields[f]
+ , val = header[field]
+ if (typeof val !== "undefined") props[field] = val
+ }
+
+ // next, the global header for this file.
+ // numeric values, etc, will have already been parsed.
+ ;[global, extended].forEach(function (p) {
+ Object.keys(p).forEach(function (f) {
+ if (typeof p[f] !== "undefined") props[f] = p[f]
+ })
+ })
+
+ // no nulls allowed in path or linkpath
+ ;["path", "linkpath"].forEach(function (p) {
+ if (props.hasOwnProperty(p)) {
+ props[p] = props[p].split("\0")[0]
+ }
+ })
+
+
+ // set date fields to be a proper date
+ ;["mtime", "ctime", "atime"].forEach(function (p) {
+ if (props.hasOwnProperty(p)) {
+ props[p] = new Date(props[p] * 1000)
+ }
+ })
+
+ // set the type so that we know what kind of file to create
+ var type
+ switch (tar.types[props.type]) {
+ case "OldFile":
+ case "ContiguousFile":
+ type = "File"
+ break
+
+ case "GNUDumpDir":
+ type = "Directory"
+ break
+
+ case undefined:
+ type = "Unknown"
+ break
+
+ case "Link":
+ case "SymbolicLink":
+ case "CharacterDevice":
+ case "BlockDevice":
+ case "Directory":
+ case "FIFO":
+ default:
+ type = tar.types[props.type]
+ }
+
+ this.type = type
+ this.path = props.path
+ this.size = props.size
+
+ // size is special, since it signals when the file needs to end.
+ this._remaining = props.size
+ }
+, warn: fstream.warn
+, error: fstream.error
+})
diff --git a/deps/npm/node_modules/tar/lib/extended-header-writer.js b/deps/npm/node_modules/tar/lib/extended-header-writer.js
new file mode 100644
index 0000000000..92e09681a8
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/extended-header-writer.js
@@ -0,0 +1,165 @@
+
+module.exports = ExtendedHeaderWriter
+
+var inherits = require("inherits")
+ , EntryWriter = require("./entry-writer.js")
+
+inherits(ExtendedHeaderWriter, EntryWriter)
+
+var tar = require("../tar.js")
+ , path = require("path")
+ , inherits = require("inherits")
+ , TarHeader = require("./header.js")
+
+// props is the props of the thing we need to write an
+// extended header for.
+// Don't be shy with it. Just encode everything.
+function ExtendedHeaderWriter (props) {
+ // console.error(">> ehw ctor")
+ var me = this
+
+ if (!(me instanceof ExtendedHeaderWriter)) {
+ return new ExtendedHeaderWriter(props)
+ }
+
+ me.fields = props
+
+ var p =
+ { path : ("PaxHeader" + path.join("/", props.path || ""))
+ .replace(/\\/g, "/").substr(0, 100)
+ , mode : props.mode || 0666
+ , uid : props.uid || 0
+ , gid : props.gid || 0
+ , size : 0 // will be set later
+ , mtime : props.mtime || Date.now() / 1000
+ , type : "x"
+ , linkpath : ""
+ , ustar : "ustar\0"
+ , ustarver : "00"
+ , uname : props.uname || ""
+ , gname : props.gname || ""
+ , devmaj : props.devmaj || 0
+ , devmin : props.devmin || 0
+ }
+
+
+ EntryWriter.call(me, p)
+ // console.error(">> ehw props", me.props)
+ me.props = p
+
+ me._meta = true
+}
+
// Finalize this pseudo-entry: encode the key=value body, write the
// header block followed by the body records, then end the stream.
ExtendedHeaderWriter.prototype.end = function () {
  var self = this

  if (self._ended) return
  self._ended = true

  self._encodeFields()
  self._stream.write(TarHeader.encode(self.props))
  self.body.forEach(function (chunk) {
    self._stream.write(chunk)
  })
  self._ready = true

  self._stream.end()
}
+
// Build the pax body records and accumulate their total length into
// the header's size field.
ExtendedHeaderWriter.prototype._encodeFields = function () {
  this.body = []
  // fold a split prefix back into the full path before encoding
  if (this.fields.prefix) {
    this.fields.path = this.fields.prefix + "/" + this.fields.path
    this.fields.prefix = ""
  }
  encodeFields(this.fields, "", this.body)
  var self = this
  this.body.forEach(function (chunk) {
    self.props.size += chunk.length
  })
}
+
// Recursively encode `fields` into pax "%d %s=%s\n" records, pushing
// each record's [length, data] buffer pair onto `body`.  Nested
// objects are flattened with dotted keys ("<prefix>.<key>").
function encodeFields (fields, prefix, body) {
  // "%d %s=%s\n", <length>, <keyword>, <value>
  // The length is a decimal number, and includes itself and the \n
  // Numeric values are decimal strings.

  Object.keys(fields).forEach(function (k) {
    // NOTE(review): `numeric` is computed here but never used.
    var val = fields[k]
      , numeric = tar.numeric[k]

    if (prefix) k = prefix + "." + k

    // already including NODETAR.type, don't need File=true also
    if (k === fields.type && val === true) return

    switch (k) {
      // don't include anything that's always handled just fine
      // in the normal header, or only meaningful in the context
      // of nodetar
      case "mode":
      case "cksum":
      case "ustar":
      case "ustarver":
      case "prefix":
      case "basename":
      case "dirname":
      case "needExtended":
      case "block":
      case "filter":
        return

      case "rdev":
        // device numbers are only meaningful when non-zero
        if (val === 0) return
        break

      // these three live in star's SCHILY.* extension namespace
      case "nlink":
      case "dev": // Truly a hero among men, Creator of Star!
      case "ino": // Speak his name with reverent awe! It is:
        k = "SCHILY." + k
        break

      default: break
    }

    // non-buffer objects recurse with a dotted prefix; null/undefined
    // are skipped; everything else becomes one record
    if (val && typeof val === "object" &&
        !Buffer.isBuffer(val)) encodeFields(val, k, body)
    else if (val === null || val === undefined) return
    else body.push.apply(body, encodeField(k, val))
  })

  return body
}
+
// Encode one pax record "%d %s=%s\n" for key k and value v.
// Returns [lengthBuffer, recordBuffer]; the decimal length counts
// itself and the trailing \n.
function encodeField (k, v) {
  // lowercase keys must be valid, otherwise prefix with
  // "NODETAR."
  if (k.charAt(0) === k.charAt(0).toLowerCase()) {
    var m = k.split(".")[0]
    if (!tar.knownExtended[m]) k = "NODETAR." + k
  }

  // bug fix: this previously tested the undeclared name `val` instead
  // of the parameter `v`, so numbers were never explicitly stringified
  // (and the branch would throw under strict mode).
  if (typeof v === "number") v = v.toString(10)

  var s = new Buffer(" " + k + "=" + v + "\n")
    , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1

  // if adding that many digits will make it go over that length,
  // then add one to it. For example, if the string is:
  // " foo=bar\n"
  // then that's 9 characters. With the "9", that bumps the length
  // up to 10. However, this is invalid:
  // "10 foo=bar\n"
  // but, since that's actually 11 characters, since 10 adds another
  // character to the length, and the length includes the number
  // itself. In that case, just bump it up by 1.
  if (s.length > Math.pow(10, digits) - digits) digits ++

  var len = digits + s.length

  return [new Buffer("" + len), s]
}
diff --git a/deps/npm/node_modules/tar/lib/extended-header.js b/deps/npm/node_modules/tar/lib/extended-header.js
new file mode 100644
index 0000000000..4346d6c59b
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/extended-header.js
@@ -0,0 +1,139 @@
+// An Entry consisting of:
+//
+// "%d %s=%s\n", <length>, <keyword>, <value>
+//
+// The length is a decimal number, and includes itself and the \n
+// \0 does not terminate anything. Only the length terminates the string.
+// Numeric values are decimal strings.
+
+module.exports = ExtendedHeader
+
+var Entry = require("./entry.js")
+ , inherits = require("inherits")
+ , tar = require("../tar.js")
+ , numeric = tar.numeric
+ , keyTrans = { "SCHILY.dev": "dev"
+ , "SCHILY.ino": "ino"
+ , "SCHILY.nlink": "nlink" }
+
// Initialize parser state; the actual work happens in _parse as
// "data" events arrive from the underlying Entry.
function ExtendedHeader () {
  Entry.apply(this, arguments)
  this.on("data", this._parse)

  // parsed key/value results
  this.fields = {}
  // byte offsets: absolute, and within the current record
  this._position = 0
  this._fieldPos = 0
  // state-machine registers
  this._state = SIZE
  this._size = -1
  this._key = ""
  this._sizeBuf = []
  this._keyBuf = []
  this._valBuf = []
}
+
+inherits(ExtendedHeader, Entry, { _parse: parse })
+
// Parser states, also exposed (self-indexed) on ExtendedHeader.states.
var s = 0
  , states = ExtendedHeader.states = {}
  , SIZE = states.SIZE = s++
  , KEY = states.KEY = s++
  , VAL = states.VAL = s++
  , ERR = states.ERR = s++

Object.keys(states).forEach(function (s) {
  states[states[s]] = states[s]
})

states[s] = null

// char code values for comparison
// bug fix: `a` was declared twice; the first occurrence (paired with
// Z) was clearly meant to be the uppercase bound A.
var _0 = "0".charCodeAt(0)
  , _9 = "9".charCodeAt(0)
  , point = ".".charCodeAt(0)
  , A = "A".charCodeAt(0)
  , Z = "Z".charCodeAt(0)
  , a = "a".charCodeAt(0)
  , z = "z".charCodeAt(0)
  , space = " ".charCodeAt(0)
  , eq = "=".charCodeAt(0)
  , cr = "\n".charCodeAt(0)
+
// State-machine consumer for pax extended-header bytes.  Records look
// like "%d %s=%s\n"; the decimal length counts itself and the \n, and
// NUL does not terminate anything -- only the length does.
function parse (c) {
  // once an error latches, ignore all further input
  if (this._state === ERR) return

  for ( var i = 0, l = c.length
      ; i < l
      ; this._position++, this._fieldPos++, i++) {

    var b = c[i]

    // a record must not run past its own declared length
    if (this._size >= 0 && this._fieldPos > this._size) {
      error(this, "field exceeds length="+this._size)
      return
    }

    switch (this._state) {
      case ERR: return

      // accumulating the decimal length, terminated by a space
      case SIZE:
        if (b === space) {
          this._state = KEY
          this._size = parseInt(new Buffer(this._sizeBuf).toString(), 10)
          this._sizeBuf.length = 0
          continue
        }
        if (b < _0 || b > _9) {
          error(this, "expected [" + _0 + ".." + _9 + "], got " + b)
          return
        }
        this._sizeBuf.push(b)
        continue

      // accumulating the keyword, terminated by "="
      case KEY:
        // can be any char except =, not > size.
        if (b === eq) {
          this._state = VAL
          this._key = new Buffer(this._keyBuf).toString()
          // map SCHILY.* keys back to their plain nodetar names
          if (keyTrans[this._key]) this._key = keyTrans[this._key]
          this._keyBuf.length = 0
          continue
        }
        this._keyBuf.push(b)
        continue

      // accumulating the value; record must end with \n at exactly
      // the declared length
      case VAL:
        // field must end with cr
        if (this._fieldPos === this._size - 1) {
          if (b !== cr) {
            error(this, "expected \\n at end of field")
            return
          }
          var val = new Buffer(this._valBuf).toString()
          // numeric fields are decimal strings
          if (numeric[this._key]) {
            val = parseFloat(val)
          }
          this.fields[this._key] = val

          // reset for the next record; the loop header's ++ brings
          // _fieldPos back to 0
          this._valBuf.length = 0
          this._state = SIZE
          this._size = -1
          this._fieldPos = -1
          continue
        }
        this._valBuf.push(b)
        continue
    }
  }
}
+
// Report a malformed pax header and latch the parser into ERR so
// subsequent chunks are ignored.
function error (me, msg) {
  msg = "invalid header: " + msg
      + "\nposition=" + me._position
      + "\nfield position=" + me._fieldPos

  me.error(msg)
  // bug fix: was `me.state = ERR`, but parse() checks `this._state`,
  // so a parse error never actually stopped the state machine.
  me._state = ERR
}
diff --git a/deps/npm/node_modules/tar/lib/extract.js b/deps/npm/node_modules/tar/lib/extract.js
new file mode 100644
index 0000000000..e45974c723
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/extract.js
@@ -0,0 +1,64 @@
+// give it a tarball and a path, and it'll dump the contents
+
+module.exports = Extract
+
+var tar = require("../tar.js")
+ , fstream = require("fstream")
+ , inherits = require("inherits")
+ , path = require("path")
+
// A Parse that pipes its entries into an fstream DirWriter.
// Accepts either an options object or a plain destination path.
function Extract (opts) {
  if (!(this instanceof Extract)) return new Extract(opts)
  tar.Parse.apply(this)

  // bug fix: normalize a string argument FIRST.  The original code
  // assigned opts.type/opts.Directory before this check, so those
  // writes landed on a string primitive and were silently discarded
  // (and would throw in strict mode).
  if (typeof opts !== "object") {
    opts = { path: opts }
  }

  // have to dump into a directory
  // better to drop in cwd? seems more standard.
  opts.path = opts.path || path.resolve("node-tar-extract")
  opts.type = "Directory"
  opts.Directory = true

  this._fst = fstream.Writer(opts)

  this.pause()
  var me = this

  // Hardlinks in tarballs are relative to the root
  // of the tarball. So, they need to be resolved against
  // the target directory in order to be created properly.
  me.on("entry", function (entry) {
    if (entry.type !== "Link") return
    entry.linkpath = entry.props.linkpath =
      path.join(opts.path, path.join("/", entry.props.linkpath))
  })

  // don't emit anything until the destination dir is ready
  this._fst.on("ready", function () {
    me.pipe(me._fst, { end: false })
    me.resume()
  })

  // "end"/"close" are deferred until the writer has flushed to disk
  this._fst.on("close", function () {
    me.emit("end")
    me.emit("close")
  })
}
+
+inherits(Extract, tar.Parse)
+
// Overrides Parse._streamEnd: instead of emitting "end" immediately,
// end the fstream writer and let its "close" handler (in the ctor)
// emit "end"/"close" once everything is flushed.
Extract.prototype._streamEnd = function () {
  var self = this
  if (!self._ended) self.error("unexpected eof")
  // our own "end" comes later, from the writer's close event
  self._fst.end()
}
diff --git a/deps/npm/node_modules/tar/lib/global-header-writer.js b/deps/npm/node_modules/tar/lib/global-header-writer.js
new file mode 100644
index 0000000000..0bfc7b80aa
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/global-header-writer.js
@@ -0,0 +1,14 @@
+module.exports = GlobalHeaderWriter
+
+var ExtendedHeaderWriter = require("./extended-header-writer.js")
+ , inherits = require("inherits")
+
+inherits(GlobalHeaderWriter, ExtendedHeaderWriter)
+
// Identical to an ExtendedHeaderWriter except the entry is typed "g"
// (global), so its fields apply to all subsequent entries.
function GlobalHeaderWriter (props) {
  if (!(this instanceof GlobalHeaderWriter)) {
    return new GlobalHeaderWriter(props)
  }
  ExtendedHeaderWriter.apply(this, [props])
  this.props.type = "g"
}
diff --git a/deps/npm/node_modules/tar/lib/header.js b/deps/npm/node_modules/tar/lib/header.js
new file mode 100644
index 0000000000..05b237c0c7
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/header.js
@@ -0,0 +1,385 @@
+// parse a 512-byte header block to a data object, or vice-versa
+// If the data won't fit nicely in a simple header, then generate
+// the appropriate extended header file, and return that.
+
+module.exports = TarHeader
+
+var tar = require("../tar.js")
+ , fields = tar.fields
+ , fieldOffs = tar.fieldOffs
+ , fieldEnds = tar.fieldEnds
+ , fieldSize = tar.fieldSize
+ , numeric = tar.numeric
+ , assert = require("assert").ok
+ , space = " ".charCodeAt(0)
+ , slash = "/".charCodeAt(0)
+ , bslash = process.platform === "win32" ? "\\".charCodeAt(0) : null
+
// A TarHeader wraps one 512-byte ustar block; constructing it with a
// block decodes immediately.
function TarHeader (block) {
  if (!(this instanceof TarHeader)) return new TarHeader(block)
  if (block) this.decode(block)
}

TarHeader.prototype = {
  decode: decode,
  encode: encode,
  calcSum: calcSum,
  checkSum: checkSum
}

// also usable without an instance
TarHeader.parseNumeric = parseNumeric
TarHeader.encode = encode
TarHeader.decode = decode
+
+// note that this will only do the normal ustar header, not any kind
+// of extended posix header file. If something doesn't fit comfortably,
+// then it will set obj.needExtended = true, and set the block to
+// the closest approximation.
+function encode (obj) {
+ if (!obj && !(this instanceof TarHeader)) throw new Error(
+ "encode must be called on a TarHeader, or supplied an object")
+
+ obj = obj || this
+ var block = obj.block = new Buffer(512)
+
+ // if the object has a "prefix", then that's actually an extension of
+ // the path field.
+ if (obj.prefix) {
+ // console.error("%% header encoding, got a prefix", obj.prefix)
+ obj.path = obj.prefix + "/" + obj.path
+ // console.error("%% header encoding, prefixed path", obj.path)
+ obj.prefix = ""
+ }
+
+ obj.needExtended = false
+
+ if (obj.mode) {
+ if (typeof obj.mode === "string") obj.mode = parseInt(obj.mode, 8)
+ obj.mode = obj.mode & 0777
+ }
+
+ for (var f = 0; fields[f] !== null; f ++) {
+ var field = fields[f]
+ , off = fieldOffs[f]
+ , end = fieldEnds[f]
+ , ret
+
+ switch (field) {
+ case "cksum":
+ // special, done below, after all the others
+ break
+
+ case "prefix":
+ // special, this is an extension of the "path" field.
+ // console.error("%% header encoding, skip prefix later")
+ break
+
+ case "type":
+ // convert from long name to a single char.
+ var type = obj.type || "0"
+ if (type.length > 1) {
+ type = tar.types[obj.type]
+ if (!type) type = "0"
+ }
+ writeText(block, off, end, type)
+ break
+
+ case "path":
+ // uses the "prefix" field if > 100 bytes, but <= 255
+ var pathLen = Buffer.byteLength(obj.path)
+ , pathFSize = fieldSize[fields.path]
+ , prefFSize = fieldSize[fields.prefix]
+
+ // paths between 100 and 255 should use the prefix field.
+ // longer than 255
+ if (pathLen > pathFSize &&
+ pathLen <= pathFSize + prefFSize) {
+ // need to find a slash somewhere in the middle so that
+ // path and prefix both fit in their respective fields
+ var searchStart = pathLen - 1 - pathFSize
+ , searchEnd = prefFSize
+ , found = false
+ , pathBuf = new Buffer(obj.path)
+
+ for ( var s = searchStart
+ ; (s <= searchEnd)
+ ; s ++ ) {
+ if (pathBuf[s] === slash || pathBuf[s] === bslash) {
+ found = s
+ break
+ }
+ }
+
+ if (found !== false) {
+ prefix = pathBuf.slice(0, found).toString("utf8")
+ path = pathBuf.slice(found + 1).toString("utf8")
+
+ ret = writeText(block, off, end, path)
+ off = fieldOffs[fields.prefix]
+ end = fieldEnds[fields.prefix]
+ // console.error("%% header writing prefix", off, end, prefix)
+ ret = writeText(block, off, end, prefix) || ret
+ break
+ }
+ }
+
+ // paths less than 100 chars don't need a prefix
+ // and paths longer than 255 need an extended header and will fail
+ // on old implementations no matter what we do here.
+ // Null out the prefix, and fallthrough to default.
+ // console.error("%% header writing no prefix")
+ var poff = fieldOffs[fields.prefix]
+ , pend = fieldEnds[fields.prefix]
+ writeText(block, poff, pend, "")
+ // fallthrough
+
+ // all other fields are numeric or text
+ default:
+ ret = numeric[field]
+ ? writeNumeric(block, off, end, obj[field])
+ : writeText(block, off, end, obj[field] || "")
+ break
+ }
+ obj.needExtended = obj.needExtended || ret
+ }
+
+ var off = fieldOffs[fields.cksum]
+ , end = fieldEnds[fields.cksum]
+
+ writeNumeric(block, off, end, calcSum.call(this, block))
+
+ return block
+}
+
// if it's a negative number, or greater than will fit,
// then use write256.
// Max octal values by field width (the width includes the trailing NUL).
var MAXNUM = { 12: 077777777777
             , 11: 07777777777
             , 8 : 07777777
             , 7 : 0777777 }

// Write `num` into block[off..end) as NUL-terminated padded octal
// text, falling back to base-256 when it doesn't fit.  Returns true
// when an extended header is needed (negative or oversized value).
function writeNumeric (block, off, end, num) {
  var writeLen = end - off
    , maxNum = MAXNUM[writeLen] || 0

  num = num || 0

  // dates are stored as seconds since the epoch
  if (num instanceof Date ||
      Object.prototype.toString.call(num) === "[object Date]") {
    num = num.getTime() / 1000
  }

  if (num > maxNum || num < 0) {
    write256(block, off, end, num)
    // need an extended header if negative or too big.
    return true
  }

  // god, tar is so annoying
  // if the string is small enough, you should put a space
  // between the octal string and the \0, but if it doesn't
  // fit, then don't.
  var numStr = Math.floor(num).toString(8)
  if (num < MAXNUM[writeLen - 1]) numStr += " "

  // pad with "0" chars
  // (new Array(n).join("0") yields n-1 zeros, leaving room for the NUL)
  if (numStr.length < writeLen) {
    numStr = (new Array(writeLen - numStr.length).join("0")) + numStr
  }

  if (numStr.length !== writeLen - 1) {
    throw new Error("invalid length: " + JSON.stringify(numStr) + "\n" +
                    "expected: "+writeLen)
  }
  block.write(numStr, off, writeLen, "utf8")
  block[end - 1] = 0
}
+
// Write `num` into block[off..end) in base-256 (binary) form, used
// when the value is negative or too large for the octal field.
// Layout: byte 0 is 0x80 for positive, 0xFF for negative
// (2's-complement); the rest is the magnitude, big-endian.
function write256 (block, off, end, num) {
  var buf = block.slice(off, end)
  var positive = num >= 0
  buf[0] = positive ? 0x80 : 0xFF

  // get the number as a base-256 tuple, least-significant byte first
  if (!positive) num *= -1
  var tuple = []
  do {
    var n = num % 256
    tuple.push(n)
    num = (num - n) / 256
  } while (num)

  var bytes = tuple.length

  // pad between the marker byte and the value (sign extension)
  var fill = buf.length - bytes
  for (var i = 1; i < fill; i ++) {
    buf[i] = positive ? 0 : 0xFF
  }

  // tuple is a base256 number, with [0] as the *least* significant byte.
  // If it's negative, then we need to flip all the bits once we hit the
  // first non-zero bit (starting from the least significant end).  The
  // 2's-complement is (0x100 - n), and the 1's-complement is (0xFF - n).
  // bug fix: the write index was `fill + i`, which skipped buf[fill]
  // and wrote one byte past the end of the field; it is `fill + i - 1`.
  var zero = true
  for (i = bytes; i > 0; i --) {
    var byte = tuple[bytes - i]
    if (positive) buf[fill + i - 1] = byte
    else if (zero && byte === 0) buf[fill + i - 1] = 0
    else if (zero) {
      zero = false
      buf[fill + i - 1] = 0x100 - byte
    } else buf[fill + i - 1] = 0xFF - byte
  }
}
+
// Write `str` into block[off..end) as UTF-8, NUL-padding the rest of
// the field.  Returns true when an extended header is needed: either
// the string contains non-ASCII bytes, or it was truncated to fit.
function writeText (block, off, end, str) {
  var byteLen = Buffer.byteLength(str)
    , avail = end - off
    , len = byteLen < avail ? byteLen : avail
    , needExtended = byteLen !== str.length || byteLen > len

  if (len > 0) block.write(str, off, len, "utf8")

  // null-pad the remainder of the field
  var i = off + len
  while (i < end) {
    block[i] = 0
    i ++
  }

  return needExtended
}
+
// Sum every byte of the 512-byte block, treating the checksum field
// itself as if it were filled with ASCII spaces.
function calcSum (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  if (!block) throw new Error("Need block to checksum")

  var start = fieldOffs[fields.cksum]
    , end = fieldEnds[fields.cksum]
    , sum = 0
    , i = 0

  // bytes before the checksum field
  for (; i < start; i ++) sum += block[i]
  // the checksum field counts as spaces
  for (; i < end; i ++) sum += space
  // bytes after the checksum field
  for (; i < 512; i ++) sum += block[i]

  return sum
}
+
+
// Validate a block: recompute the checksum and compare it with the
// value stored in the header's cksum field.
function checkSum (block) {
  var sum = calcSum.call(this, block)
  block = block || this.block

  var stored = parseNumeric(
    block.slice(fieldOffs[fields.cksum], fieldEnds[fields.cksum]))

  return stored === sum
}
+
// Populate this header's fields from a 512-byte block, validating the
// checksum and reassembling a split prefix+path.
function decode (block) {
  block = block || this.block
  assert(Buffer.isBuffer(block) && block.length === 512)

  this.block = block
  this.cksumValid = this.checkSum()

  var prefix = null

  // slice off each field.
  for (var f = 0; fields[f] !== null; f ++) {
    var field = fields[f]
      , val = block.slice(fieldOffs[f], fieldEnds[f])

    switch (field) {
      case "ustar":
        // if not ustar, then everything after that is just padding.
        if (val.toString() !== "ustar\0") {
          this.ustar = false
          return
        } else {
          this.ustar = val.toString()
        }
        break

      // prefix is special, since it might signal the xstar header
      case "prefix":
        // NOTE(review): offsets 130/131 assume star's "xstar" layout,
        // where atime/ctime are stored inside the prefix field --
        // confirm against the star file-format documentation.
        var atime = parseNumeric(val.slice(131, 131 + 12))
          , ctime = parseNumeric(val.slice(131 + 12, 131 + 12 + 12))
        if ((val[130] === 0 || val[130] === space) &&
            typeof atime === "number" &&
            typeof ctime === "number" &&
            val[131 + 12] === space &&
            val[131 + 12 + 12] === space) {
          this.atime = atime
          this.ctime = ctime
          val = val.slice(0, 130)
        }
        prefix = val.toString("utf8").replace(/\0+$/, "")
        break

      // all other fields are null-padding text
      // or a number.
      default:
        if (numeric[field]) {
          this[field] = parseNumeric(val)
        } else {
          this[field] = val.toString("utf8").replace(/\0+$/, "")
        }
        break
    }
  }

  // if we got a prefix, then prepend it to the path.
  if (prefix) {
    this.path = prefix + "/" + this.path
  }
}
+
// Decode a base-256 (binary) numeric field.
// first byte MUST be either 80 or FF
// 80 for positive, FF for 2's comp
function parse256 (buf) {
  var positive
  if (buf[0] === 0x80) positive = true
  else if (buf[0] === 0xFF) positive = false
  else return null

  // build up a base-256 tuple from the least sig to the highest.
  // bug fix: `zero` must start true so that trailing zero bytes of a
  // negative value are preserved and the first non-zero byte gets the
  // 2's-complement (0x100 - n).  It previously started false, so every
  // byte was 1's-complemented and negative values decoded wrong (an
  // all-0xFF field decoded to -0 instead of -1).
  var zero = true
    , tuple = []
  for (var i = buf.length - 1; i > 0; i --) {
    var byte = buf[i]
    if (positive) tuple.push(byte)
    else if (zero && byte === 0) tuple.push(0)
    else if (zero) {
      zero = false
      tuple.push(0x100 - byte)
    } else tuple.push(0xFF - byte)
  }

  for (var sum = 0, i = 0, l = tuple.length; i < l; i ++) {
    sum += tuple[i] * Math.pow(256, i)
  }

  return positive ? sum : -1 * sum
}
+
// Parse a numeric header field: base-256 binary when the high bit of
// the first byte is set, otherwise NUL/space-padded octal text.
// Returns null when the field holds no parseable number.
function parseNumeric (f) {
  if (f[0] & 0x80) return parse256(f)

  var text = f.toString("utf8").split("\0")[0].trim()
  var parsed = parseInt(text, 8)

  return isNaN(parsed) ? null : parsed
}
+
diff --git a/deps/npm/node_modules/tar/lib/pack.js b/deps/npm/node_modules/tar/lib/pack.js
new file mode 100644
index 0000000000..411b8b6b0c
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/pack.js
@@ -0,0 +1,212 @@
+// pipe in an fstream, and it'll make a tarball.
+// key-value pair argument is global extended header props.
+
+module.exports = Pack
+
+var EntryWriter = require("./entry-writer.js")
+ , Stream = require("stream").Stream
+ , path = require("path")
+ , inherits = require("inherits")
+ , GlobalHeaderWriter = require("./global-header-writer.js")
+ , collect = require("fstream").collect
+ , eof = new Buffer(512)
+
+for (var i = 0; i < 512; i ++) eof[i] = 0
+
+inherits(Pack, Stream)
+
// A Pack stream: fstream entries go in, tarball bytes come out.
// `props`, if given, is encoded first as a global extended header.
function Pack (props) {
  var self = this
  if (!(self instanceof Pack)) return new Pack(props)

  self._global = props

  self.readable = true
  self.writable = true
  self._buffer = []
  self._currentEntry = null
  self._processing = false

  // when an fstream Reader is piped in, add it (once per root)
  self._pipeRoot = null
  self.on("pipe", function (src) {
    if (src.root === self._pipeRoot) return
    self._pipeRoot = src
    src.on("end", function () {
      self._pipeRoot = null
    })
    self.add(src)
  })
}
+
// Emit the global extended header ("g" entry) exactly once, before
// any real entries are written.
Pack.prototype.addGlobal = function (props) {
  if (this._didGlobal) return
  this._didGlobal = true

  var self = this
  var writer = GlobalHeaderWriter(props)
  writer.on("data", function (chunk) {
    self.emit("data", chunk)
  })
  writer.end()
}
+
// Queue an fstream entry for packing.  Per write-stream convention,
// returns false when the internal buffer is non-empty, signalling the
// caller to wait for "drain".
Pack.prototype.add = function (stream) {
  if (this._global && !this._didGlobal) {
    this.addGlobal(this._global)
  }

  if (this._ended) {
    return this.emit("error", new Error("add after end"))
  }

  collect(stream)
  this._buffer.push(stream)
  this._process()

  var buffered = this._buffer.length > 0
  this._needDrain = buffered
  return !buffered
}
+
// Pause output, propagating to the entry currently being written.
Pack.prototype.pause = function () {
  this._paused = true
  var current = this._currentEntry
  if (current) current.pause()
  this.emit("pause")
}

// Resume output and kick the processing loop again.
Pack.prototype.resume = function () {
  this._paused = false
  var current = this._currentEntry
  if (current) current.resume()
  this.emit("resume")
  this._process()
}

// No more entries will be added; queue the EOF marker blocks.
Pack.prototype.end = function () {
  this._ended = true
  this._buffer.push(eof)
  this._process()
}
+
// Drain one queued entry (or the EOF marker) into "data" events.
// Re-entered via nextEntry() once the current writer finishes.
Pack.prototype._process = function () {
  var me = this
  // one entry at a time, and not while paused
  if (me._paused || me._processing) {
    return
  }

  var entry = me._buffer.shift()

  if (!entry) {
    // queue drained; tell producers they may add() again
    if (me._needDrain) {
      me.emit("drain")
    }
    return
  }

  if (entry.ready === false) {
    // fstream entry hasn't finished stat'ing yet; retry when ready
    me._buffer.unshift(entry)
    entry.on("ready", function () {
      me._process()
    })
    return
  }

  me._processing = true

  if (entry === eof) {
    // need 2 ending null blocks.
    me.emit("data", eof)
    me.emit("data", eof)
    me.emit("end")
    me.emit("close")
    return
  }

  // Change the path to be relative to the root dir that was
  // added to the tarball.
  //
  // XXX This should be more like how -C works, so you can
  // explicitly set a root dir, and also explicitly set a pathname
  // in the tarball to use. That way we can skip a lot of extra
  // work when resolving symlinks for bundled dependencies in npm.

  var root = path.dirname((entry.root || entry).path)
  var wprops = {}

  // shallow-copy the entry's props for the writer
  Object.keys(entry.props).forEach(function (k) {
    wprops[k] = entry.props[k]
  })

  wprops.path = path.relative(root, entry.path)

  switch (wprops.type) {
    // directories get a trailing slash and carry no data
    case "Directory":
      wprops.path += "/"
      wprops.size = 0
      break
    // hardlink targets are stored relative to the tarball root
    case "Link":
      var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
      wprops.linkpath = path.relative(root, lp)
      wprops.size = 0
      break
    // symlink targets stay relative to the link's own directory
    case "SymbolicLink":
      var lp = path.resolve(path.dirname(entry.path), entry.linkpath)
      wprops.linkpath = path.relative(path.dirname(entry.path), lp)
      wprops.size = 0
      break
  }

  var writer = me._currentEntry = EntryWriter(wprops)

  writer.parent = me

  writer.on("data", function (c) {
    me.emit("data", c)
  })

  writer.on("header", function () {
    // NOTE(review): debugging aid -- this mutates the global
    // Buffer.prototype for the whole process and is never undone;
    // consider removing it.
    Buffer.prototype.toJSON = function () {
      return this.toString().split(/\0/).join(".")
    }
    // zero-size entries have no body; move on immediately
    if (writer.props.size === 0) nextEntry()
  })
  writer.on("close", nextEntry)

  // guard: both "header" (size 0) and "close" can fire
  var ended = false
  function nextEntry () {
    if (ended) return
    ended = true

    me._currentEntry = null
    me._processing = false
    me._process()
  }

  writer.on("error", function (er) {
    me.emit("error", er)
  })

  // if it's the root, then there's no need to add its entries,
  // or data, since they'll be added directly.
  if (entry === me._pipeRoot) {
    writer.add = null
  }

  entry.pipe(writer)
}
+
// No-ops: Pack is fed via add()/pipe rather than write(), and destroy
// is unsupported.  Present so generic stream plumbing doesn't throw.
Pack.prototype.destroy = function () {}
Pack.prototype.write = function () {}
diff --git a/deps/npm/node_modules/tar/lib/parse.js b/deps/npm/node_modules/tar/lib/parse.js
new file mode 100644
index 0000000000..d9784b5ff7
--- /dev/null
+++ b/deps/npm/node_modules/tar/lib/parse.js
@@ -0,0 +1,253 @@
+
+// A writable stream.
+// It emits "entry" events, which provide a readable stream that has
+// header info attached.
+
+module.exports = Parse.create = Parse
+
+var stream = require("stream")
+ , Stream = stream.Stream
+ , BlockStream = require("block-stream")
+ , tar = require("../tar.js")
+ , TarHeader = require("./header.js")
+ , Entry = require("./entry.js")
+ , BufferEntry = require("./buffer-entry.js")
+ , ExtendedHeader = require("./extended-header.js")
+ , assert = require("assert").ok
+ , inherits = require("inherits")
+ , fstream = require("fstream")
+
+// reading a tar is a lot like reading a directory
+// However, we're actually not going to run the ctor,
+// since it does a stat and various other stuff.
+// This inheritance gives us the pause/resume/pipe
+// behavior that is desired.
+inherits(Parse, fstream.Reader)
+
// A writable stream that re-chunks tarball bytes into 512-byte blocks
// and emits "entry" events for each file found.
function Parse () {
  var self = this
  if (!(self instanceof Parse)) return new Parse()

  // Deliberately skip fstream.Reader's ctor (it would stat, etc.);
  // we only want the pause/resume/pipe behavior inherited from it.
  Stream.apply(self)

  self.writable = true
  self.readable = true
  self._stream = new BlockStream(512)

  self._stream.on("error", function (er) {
    self.emit("error", er)
  })

  self._stream.on("data", function (block) {
    self._process(block)
  })

  self._stream.on("end", function () {
    self._streamEnd()
  })

  self._stream.on("drain", function () {
    self.emit("drain")
  })
}
+
+// overridden in Extract class, since it needs to
+// wait for its DirWriter part to finish before
+// emitting "end"
// Overridden in the Extract class, which must wait for its DirWriter
// part to finish before emitting "end".
Parse.prototype._streamEnd = function () {
  var self = this
  if (!self._ended) self.error("unexpected eof")
  self.emit("end")
}
+
// a tar reader is actually a filter, not just a readable stream.
// So, you should pipe a tarball stream into it, and it needs these
// write/end methods to feed the internal block stream.
Parse.prototype.write = function (c) {
  if (this._ended) {
    return this.error("write() after end()")
  }
  return this._stream.write(c)
}

Parse.prototype.end = function (c) {
  this._ended = true
  return this._stream.end(c)
}

// Data is proxied up from _stream, so there is nothing to read here;
// this only overrides the parent's "Not Implemented" thrower.
Parse.prototype._read = function () {}
+
// Consume exactly one 512-byte block.  One of three cases:
// 1. A new header
// 2. A part of a file/extended header
// 3. One of two or more EOF null blocks
Parse.prototype._process = function (c) {
  assert(c && c.length === 512, "block size should be 512")

  if (this._entry) {
    // body data for the entry in progress
    var entry = this._entry
    entry.write(c)
    if (entry._remaining === 0) {
      entry.end()
      this._entry = null
    }
  } else {
    // either zeroes or a header
    var zero = true
    for (var i = 0; i < 512 && zero; i ++) {
      zero = c[i] === 0
    }

    // eof is *at least* 2 blocks of nulls, and then the end of the
    // file. you can put blocks of nulls between entries anywhere,
    // so appending one tarball to another is technically valid.
    // ending without the eof null blocks is not allowed, however.
    if (zero) {
      // two zero blocks in a row marks the archive end
      this._ended = this._eofStarted
      this._eofStarted = true
    } else {
      this._ended = this._eofStarted = false
      this._startEntry(c)
    }

  }
}
+
+// take a header chunk, start the right kind of entry.
// take a header chunk, start the right kind of entry.
// Dispatches on the tar type flag: real files become Entry streams
// ("entry" events); pax/gnu meta entries are parsed and folded into
// the extended/global state applied to the next real entry.
Parse.prototype._startEntry = function (c) {
  var header = new TarHeader(c)
    , self = this
    , entry
    , ev
    , EntryType
    , onend
    , meta = false

  switch (tar.types[header.type]) {
    case "File":
    case "OldFile":
    case "Link":
    case "SymbolicLink":
    case "CharacterDevice":
    case "BlockDevice":
    case "Directory":
    case "FIFO":
    case "ContiguousFile":
    case "GNUDumpDir":
      // start a file.
      // pass in any extended headers
      // These ones consumers are typically most interested in.
      EntryType = Entry
      ev = "entry"
      break

    case "GlobalExtendedHeader":
      // extended headers that apply to the rest of the tarball
      EntryType = ExtendedHeader
      onend = function () {
        self._global = self._global || {}
        Object.keys(entry.fields).forEach(function (k) {
          self._global[k] = entry.fields[k]
        })
      }
      ev = "globalExtendedHeader"
      meta = true
      break

    case "ExtendedHeader":
    case "OldExtendedHeader":
      // extended headers that apply to the next entry
      EntryType = ExtendedHeader
      onend = function () {
        self._extended = entry.fields
      }
      ev = "extendedHeader"
      meta = true
      break

    case "NextFileHasLongLinkpath":
      // set linkpath=<contents> in extended header
      EntryType = BufferEntry
      onend = function () {
        self._extended = self._extended || {}
        self._extended.linkpath = entry.body
      }
      ev = "longLinkpath"
      meta = true
      break

    case "NextFileHasLongPath":
    case "OldGnuLongPath":
      // set path=<contents> in file-extended header
      EntryType = BufferEntry
      onend = function () {
        self._extended = self._extended || {}
        self._extended.path = entry.body
      }
      ev = "longPath"
      meta = true
      break

    default:
      // all the rest we skip, but still set the _entry
      // member, so that we can skip over their data appropriately.
      // emit an event to say that this is an ignored entry type?
      EntryType = Entry
      ev = "ignoredEntry"
      break
  }

  // NOTE(review): the inner `var global`/`var extended` below
  // re-declare the two names declared here -- harmless, but redundant.
  var global, extended
  if (meta) {
    // meta entries don't consume the pending extended/global state
    global = extended = null
  } else {
    var global = this._global
    var extended = this._extended

    // extendedHeader only applies to one entry, so once we start
    // an entry, it's over.
    this._extended = null
  }
  entry = new EntryType(header, extended, global)
  entry.meta = meta

  // only proxy data events of normal files.
  // NOTE(review): `me` is declared further down; function-scope
  // hoisting makes this safe because the listener can only fire after
  // this function returns, but it reads poorly.
  if (!meta) {
    entry.on("data", function (c) {
      me.emit("data", c)
    })
  }

  if (onend) entry.on("end", onend)

  this._entry = entry
  var me = this

  entry.on("pause", function () {
    me.pause()
  })

  entry.on("resume", function () {
    me.resume()
  })

  // "*" listeners receive (eventName, entry) for every entry type
  if (this.listeners("*").length) {
    this.emit("*", ev, entry)
  }

  this.emit(ev, entry)

  // Zero-byte entry. End immediately.
  if (entry.props.size === 0) {
    entry.end()
    this._entry = null
  }
}
diff --git a/deps/npm/node_modules/tar/old/README.md b/deps/npm/node_modules/tar/old/README.md
new file mode 100644
index 0000000000..aef9844978
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/README.md
@@ -0,0 +1 @@
+tar for node
diff --git a/deps/npm/node_modules/tar/old/doc/example.js b/deps/npm/node_modules/tar/old/doc/example.js
new file mode 100644
index 0000000000..d29517e49f
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/doc/example.js
@@ -0,0 +1,24 @@
// request a tar file, and then write it
// NOTE(review): illustrative pseudo-code only — `{...}` is a placeholder
// (not valid JS), and `tar`/`fs` are never required; this file documents
// the intended API shape rather than running as-is.
require("http").request({...}, function (resp) {
  resp.pipe(tar.createParser(function (file) {
    if (file.isDirectory()) {
      // pause the parser while the directory is created on disk
      this.pause()
      return fs.mkdir(file.name, function (er) {
        if (er) return this.emit("error", er)
        this.resume()
      })
    } else if (file.isSymbolicLink()) {
      this.pause()
      return fs.symlink(file.link, file.name, function (er) {
        if (er) return this.emit("error", er)
        this.resume()
      })
    } else if (file.isFile()) {
      // regular files just stream straight to disk
      file.pipe(fs.createWriteStream(file.name))
    }
  }))
  // or maybe just have it do all that internally?
  resp.pipe(tar.createParser(function (file) {
    this.create("/extract/target/path", file)
  }))
})
diff --git a/deps/npm/node_modules/tar/old/generator.js b/deps/npm/node_modules/tar/old/generator.js
new file mode 100644
index 0000000000..c2506c4151
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/generator.js
@@ -0,0 +1,387 @@
+module.exports = Generator
+Generator.create = create
+
+var tar = require("./tar")
+ , Stream = require("stream").Stream
+ , Parser = require("./parser")
+ , fs = require("fs")
+
// Factory wrapper: build a Generator without requiring callers to use `new`.
function create (opts) {
  var generator = new Generator(opts)
  return generator
}
+
+function Generator (opts) {
+ this.readable = true
+ this.currentFile = null
+
+ this._paused = false
+ this._ended = false
+ this._queue = []
+
+ this.options = { cwd: process.cwd() }
+ Object.keys(opts).forEach(function (o) {
+ this.options[o] = opts[o]
+ }, this)
+ if (this.options.cwd.slice(-1) !== "/") {
+ this.options.cwd += "/"
+ }
+
+ Stream.apply(this)
+}
+
+Generator.prototype = Object.create(Stream.prototype)
+
// Pause archive output.  Sets the `_paused` flag that _processQueue
// checks (the original wrote `this.paused`, a property nothing reads,
// so pausing never actually stopped the queue) and forwards the pause
// to the entry currently being streamed.
Generator.prototype.pause = function () {
  if (this.currentFile) this.currentFile.pause()
  this._paused = true
  this.emit("pause")
}

// Resume output: clear the flag read by _processQueue, wake the current
// entry stream, and kick the queue again.
Generator.prototype.resume = function () {
  this._paused = false
  if (this.currentFile) this.currentFile.resume()
  this.emit("resume")
  this._processQueue()
}
+
// Mark the archive complete.  The closing null blocks are emitted by
// _processQueue once the queue drains (see the `_ended` branch there),
// so ending with entries still queued is safe.
Generator.prototype.end = function () {
  this._ended = true
  this._processQueue()
}
+
// Queue an entry for inclusion in the archive.
// `f` may be a filename, a Parser.File, or a named Stream (see
// toFileStream).  `st` is accepted but currently ignored —
// toFileStream only declares one parameter, so the fd/Stats cases
// promised below are unimplemented.  NOTE(review): confirm intent.
Generator.prototype.append = function (f, st) {
  if (this._ended) return this.emit("error", new Error(
    "Cannot append after ending"))

  // if it's a string, then treat it as a filename.
  // if it's a number, then treat it as a fd
  // if it's a Stats, then treat it as a stat object
  // if it's a Stream, then stream it in.
  var s = toFileStream(f, st)
  if (!s) return this.emit("error", new TypeError(
    "Invalid argument: "+f))

  // make sure it's in the folder being added.
  if (s.name.indexOf(this.options.cwd) !== 0) {
    this.emit("error", new Error(
      "Invalid argument: "+s.name+"\nOutside of "+this.options.cwd))
  }

  // archive entries are named relative to cwd
  s.name = s.name.substr(this.options.cwd.length)
  s.pause()
  this._queue.push(s)

  // entries that already carry header metadata can be processed now
  if (!s._needStat) return this._processQueue()

  var self = this
  // NOTE(review): s.name was just made cwd-relative above, so this
  // lstat resolves against the process cwd, not options.cwd — verify.
  fs.lstat(s.name, function (er, st) {
    if (er) return self.emit("error", new Error(
      "invalid file "+s.name+"\n"+er.message))
    // permission bits only; tar stores mtime in seconds
    s.mode = st.mode & 0777
    s.uid = st.uid
    s.gid = st.gid
    s.size = st.size
    s.mtime = +st.mtime / 1000
    s.type = st.isFile() ? "0"
         : st.isSymbolicLink() ? "2"
         : st.isCharacterDevice() ? "3"
         : st.isBlockDevice() ? "4"
         : st.isDirectory() ? "5"
         : st.isFIFO() ? "6"
         : null

    // TODO: handle all the types in
    // http://cdrecord.berlios.de/private/man/star/star.4.html
    // for now, skip over unknown ones.
    if (s.type === null) {
      console.error("Unknown file type: " + s.name)
      // kick out of the queue
      var i = self._queue.indexOf(s)
      if (i !== -1) self._queue.splice(i, 1)
      self._processQueue()
      return
    }

    // symlinks need their target read before the header can be written
    if (s.type === "2") return fs.readlink(s.name, function (er, n) {
      if (er) return self.emit("error", new Error(
        "error reading link value "+s.name+"\n"+er.message))
      s.linkname = n
      s._needStat = false
      self._processQueue()
    })
    s._needStat = false
    self._processQueue()
  })
  // stat is pending: signal the caller to back off (stream convention)
  return false
}
+
// Normalize the various things append() accepts into a named stream.
// Returns null when the argument cannot be used as an archive entry.
function toFileStream (thing) {
  if (typeof thing === "string") {
    // filename: open it and recurse on the resulting ReadStream
    return toFileStream(fs.createReadStream(thing))
  }

  if (!thing || typeof thing !== "object") return null

  // already-parsed tar entries pass straight through
  if (thing instanceof Parser.File) return thing

  if (!(thing instanceof Stream)) return null

  // a stream that already carries full header metadata needs no stat
  var headerProps = [ "name", "mode", "uid", "gid", "size", "mtime", "type" ]
  var hasAllProps = headerProps.every(function (p) {
    return thing.hasOwnProperty(p)
  })
  if (hasAllProps) return thing

  // fs.ReadStream knows its path; use that as the entry name
  if (thing instanceof fs.ReadStream) {
    thing.name = thing.path
  }

  if (thing.name) {
    // metadata must be filled in by lstat before the header is emitted
    thing._needStat = true
    return thing
  }

  return null
}
+
// Emit the next queued entry, or the archive trailer once ended.
// Returns early while paused, while an entry is in flight, or while the
// head of the queue is still waiting on its lstat metadata.
Generator.prototype._processQueue = function processQueue () {
  if (this._paused) return false

  if (this.currentFile ||
      this._queue.length && this._queue[0]._needStat) {
    // either already processing one, or waiting on something.
    return
  }

  var f = this.currentFile = this._queue.shift()
  if (!f) {
    if (this._ended) {
      // close it off with 2 blocks of nulls.
      this.emit("data", new Buffer(new Array(512 * 2)))
      this.emit("end")
      this.emit("close")
    }
    return true
  }

  if (f.type === Parser.File.types.Directory &&
      f.name.slice(-1) !== "/") {
    f.name += "/"
  }

  // write out a Pax header if the file isn't kosher.
  if (this._needPax(f)) this._emitPax(f)

  // write out the header
  f.ustar = true
  this._emitHeader(f)
  var fpos = 0
    , self = this
  f.on("data", function (c) {
    self.emit("data", c)
    // track how many body bytes were emitted so the end handler can
    // pad.  (was `self.fpos += c.length`, which left the local `fpos`
    // stuck at 0 and made the padding below always 512 bytes)
    fpos += c.length
  })
  f.on("error", function (er) { self.emit("error", er) })
  f.on("end", function $END () {
    // pad with \0 out to an even multiple of 512 bytes, so the next
    // header starts on a block boundary.  (was `fpos % 512 || 512`,
    // which padded by the remainder itself rather than its complement)
    var remainder = fpos % 512
    if (remainder) {
      var b = new Buffer(512 - remainder)
      for (var i = 0, l = b.length; i < l; i ++) b[i] = 0
      self.emit("data", b)
    }
    self.currentFile = null
    self._processQueue()
  })
  f.resume()
}
+
// Decide whether an entry needs a Pax extended header before it.
// Every entry currently gets one unconditionally ("meh. why not?").
// The field-by-field oddTextField check that used to follow the
// `return true` was unreachable dead code (and would have thrown on
// entries with no link/gname/uname set), so it has been removed.
Generator.prototype._needPax = function (f) {
  return true
}
+
// check if a text field is too long for its tar header slot, or
// contains non-ascii bytes (byteLength > char count means multibyte).
// Returns an explicit boolean; a missing value never needs Pax
// (the original threw in Buffer.byteLength(undefined) and returned
// undefined in the "fits" case).
function oddTextField (val, field) {
  if (val == null) return false
  var nl = Buffer.byteLength(val)
    , len = tar.fieldSize[field]
  return nl > len || nl !== val.length
}
+
// emit a Pax header of "key = val" for any file with
// odd or too-long field values.
Generator.prototype._emitPax = function (f) {
  // since these tend to be relatively small, just go ahead
  // and emit it all in-band. That saves having to keep
  // track of the pax state in the generator, and we can
  // go right back to emitting the file in the same tick.

  // the pax header is a fake "x"-type entry named dir/PaxHeader/base
  var dir = f.name.replace(/[^\/]+\/?$/, "")
    , base = f.name.substr(dir.length)
  var pax = { name: dir + "PaxHeader/" +base
    , mode: 0644
    , uid: f.uid
    , gid: f.gid
    , mtime: +f.mtime
    // don't know size yet.
    , size: -1
    , type: "x" // extended header
    , ustar: true
    , ustarVersion: "00"
    , user: f.user || f.uname || ""
    , group: f.group || f.gname || ""
    , dev: { major: f.dev && f.dev.major || 0
           , minor: f.dev && f.dev.minor || 0 }
    , prefix: f.prefix
    , linkname: "" }

  // generate the Pax body
  var kv = { path: (f.prefix ? f.prefix + "/" : "") + f.name
    , atime: f.atime
    , mtime: f.mtime
    , ctime: f.ctime
    , charset: "UTF-8"
    , gid: f.gid
    , uid: f.uid
    , uname: f.user || f.uname || ""
    , gname: f.group || f.gname || ""
    , linkpath: f.linkpath || ""
    , size: f.size
    }
  // "%d %s=%s\n", <length>, <keyword>, <value>
  // length includes the length of the length number,
  // the key=val, and the \n.
  var body = new Buffer(Object.keys(kv).map(function (key) {
    // falsy values are skipped entirely
    if (!kv[key]) return ["", ""]

    var s = new Buffer(" " + key + "=" + kv[key]+"\n")
      , digits = Math.floor(Math.log(s.length) / Math.log(10)) + 1

    // if adding that many digits will make it go over that length,
    // then add one to it
    if (s.length > Math.pow(10, digits) - digits) digits ++

    return [s.length + digits, s]
  }).reduce(function (l, r) {
    // NOTE(review): this concatenates Buffers into a string (implicit
    // toString) and re-encodes via new Buffer(); verify it round-trips
    // for all values that can appear here.
    return l + r[0] + r[1]
  }, ""))

  pax.size = body.length
  this._emitHeader(pax)
  this.emit("data", body)
  // now the trailing buffer to make it an even number of 512 blocks
  // NOTE(review): this looks wrong — block padding should be
  // (512 - body.length % 512) % 512 bytes; this emits 512 extra and
  // pads by the remainder rather than its complement. TODO confirm.
  var b = new Buffer(512 + (body.length % 512 || 512))
  for (var i = 0, l = b.length; i < l; i ++) b[i] = 0
  this.emit("data", b)
}
+
// Serialize one 512-byte tar header for the given entry metadata and
// emit it as a single data event.  (Removed the leftover debug
// console.error calls that wrote to stderr for every entry.)
Generator.prototype._emitHeader = function (f) {
  var header = new Buffer(new Array(512))
    , fields = tar.fields
    , offs = tar.fieldOffs
    , sz = tar.fieldSize

  addField(header, "NAME", f.name)
  addField(header, "MODE", f.mode)
  addField(header, "UID", f.uid)
  addField(header, "GID", f.gid)
  addField(header, "SIZE", f.size)
  addField(header, "MTIME", +f.mtime)
  // checksum is generated based on it being spaces
  // then it's written as: "######\0 "
  // where ### is a zero-lead 6-digit octal number
  // NOTE(review): the spec sums the checksum field as 8 spaces;
  // confirm this seed fills all 8 bytes of the CKSUM slot.
  addField(header, "CKSUM", " ")

  addField(header, "TYPE", f.type)
  addField(header, "LINKNAME", f.linkname || "")
  if (f.ustar) {
    addField(header, "USTAR", tar.ustar)
    addField(header, "USTARVER", 0)
    addField(header, "UNAME", f.user || "")
    addField(header, "GNAME", f.group || "")
    if (f.dev) {
      addField(header, "DEVMAJ", f.dev.major || 0)
      addField(header, "DEVMIN", f.dev.minor || 0)
    }
    addField(header, "PREFIX", f.prefix)
  }

  // now the header is written except for checksum:
  // sum every byte, write the octal value, then null-terminate it.
  var ck = 0
  for (var i = 0; i < 512; i ++) ck += header[i]
  addField(header, "CKSUM", nF(ck, 7))
  header[ offs[fields.CKSUM] + 7 ] = 0

  this.emit("data", header)
}
+
// Write one header field into buf at its fixed offset.  Numbers are
// rendered via nF (zero-padded octal, or base-256 when oversized);
// everything else is written as raw bytes.  (Removed the per-field
// debug console.error call.)
function addField (buf, field, val) {
  var f = tar.fields[field]
  val = typeof val === "number"
      ? nF(val, tar.fieldSize[f])
      : new Buffer(val || "")
  val.copy(buf, tar.fieldOffs[f])
}
+
// Encode num into len bytes of GNU base-256 form: leading byte 0x80
// for non-negative values (big-endian payload in bytes 1..len-1),
// 0xFF for negatives (payload stored as two's complement).
// Fixes vs original: 0 is treated as positive (`num > 0` sent it down
// the negative path); the two's-complement regex group was optional,
// yielding an undefined tail; and the final loop read `buf.substr`
// (Buffers have no substr) at an offset one byte past the bit string.
function toBase256 (num, len) {
  var buf = new Buffer(len)

  if (num >= 0) {
    buf[0] = 0x80
    for (var i = 1, l = len, p = l - 1; i < l; i ++, p --) {
      buf[p] = num % 256
      num = Math.floor(num / 256)
    }
    return buf
  }

  // negative: rare and slow.  Build the magnitude's bit string
  // (toString(2).substr(1) strips the "-" sign), pad to len-1 bytes,
  // then take the two's complement: flip every bit to the left of the
  // lowest set bit, keep the rest.
  var b = num.toString(2).substr(1)
    , padTo = (len - 1) * 8
  b = new Array(padTo - b.length + 1).join("0") + b

  var ht = b.match(/^([01]*)(10*)$/)
    , head = ht[1]
    , tail = ht[2]
  head = head.split("1").join("2")
             .split("0").join("1")
             .split("2").join("0")
  b = head + tail

  buf[0] = 0xFF
  for (var i = 1; i < len; i ++) {
    buf[i] = parseInt(b.substr((i - 1) * 8, 8), 2)
  }
  return buf
}
+
// Render a numeric header field: zero-padded octal text ending in
// " \0" when it fits, base-256 binary when negative or too wide.
function nF (num, size) {
  var octal = num.toString(8)

  // negative numbers and numbers too wide for the field fall back
  // to the binary base-256 representation
  if (num < 0 || octal.length >= size) return toBase256(num, size)

  var buf = new Buffer(size)
    , padded = new Array(size - octal.length - 1).join("0") + octal + " "
  buf[size - 1] = 0
  buf.asciiWrite(padded)
  return buf
}
diff --git a/deps/npm/node_modules/tar/old/parser.js b/deps/npm/node_modules/tar/old/parser.js
new file mode 100644
index 0000000000..1582ee7577
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/parser.js
@@ -0,0 +1,344 @@
+module.exports = Parser
+Parser.create = create
+Parser.File = File
+
+var tar = require("./tar")
+ , Stream = require("stream").Stream
+ , fs = require("fs")
+
// Factory wrapper: build a Parser without requiring callers to use `new`.
function create (cb) {
  var parser = new Parser(cb)
  return parser
}
+
// parse states: reading a 512-byte header, streaming an entry body,
// or skipping the \0 padding after a body.
var s = 0
  , HEADER = s ++
  , BODY = s ++
  , PAD = s ++
+
// Writable stream that parses a tar byte stream.  Emits a "file" event
// (carrying a File stream) for each entry; `cb`, if given, is attached
// as the "file" listener.
function Parser (cb) {
  this.fields = tar.fields
  this.fieldSize = tar.fieldSize
  this.state = HEADER
  this.position = 0
  this.currentFile = null
  this._header = []
  this._headerPosition = 0
  this._bodyPosition = 0
  // accumulators for pax/gnu metadata entries.  _readLongField assumed
  // _longFields existed, but nothing ever initialized it, so the first
  // "K"/"L" long-field entry threw a TypeError.
  this._global = null
  this._extended = null
  this._longFields = {}
  this.writable = true
  Stream.apply(this)
  if (cb) this.on("file", cb)
}
+
+Parser.prototype = Object.create(Stream.prototype)
+
// Consume a chunk of tar data, dispatching on the current parse state:
// HEADER buffers up to 512 bytes then parses them, BODY streams bytes
// through the current file, PAD skips \0 padding between entries.
Parser.prototype.write = function (chunk) {
  switch (this.state) {
    case HEADER:
      // buffer up to 512 bytes in memory, and then
      // parse it, emit a "file" event, and stream the rest
      this._header.push(chunk)
      this._headerPosition += chunk.length
      if (this._headerPosition >= tar.headerSize) {
        return this._parseHeader()
      }
      return true

    case BODY:
      // stream it through until the end of the file is reached,
      // and then step over any \0 byte padding.
      var cl = chunk.length
        , bp = this._bodyPosition
        , np = cl + bp
        , s = this.currentFile.size
      if (np < s) {
        // chunk lies entirely within the current file's body
        this._bodyPosition = np
        return this.currentFile.write(chunk)
      }
      // chunk crosses the end of the body: split it, close the file,
      // and re-dispatch the remainder (now in the PAD state)
      var c = chunk.slice(0, (s - bp))
      this.currentFile.write(c)
      this._closeFile()
      return this.write(chunk.slice(s - bp))

    case PAD:
      // the first non-null byte begins the next header
      for (var i = 0, l = chunk.length; i < l; i ++) {
        if (chunk[i] !== 0) {
          this.state = HEADER
          return this.write(chunk.slice(i))
        }
      }
  }
  return true
}
+
// Flush a final chunk, close an entry still in progress, and signal
// completion.  NOTE(review): a partially-buffered header (truncated
// archive) is silently discarded here — confirm that is intended.
Parser.prototype.end = function (chunk) {
  if (chunk) this.write(chunk)
  if (this.currentFile) this._closeFile()
  this.emit("end")
  this.emit("close")
}
+
// at this point, we have at least 512 bytes of header chunks
Parser.prototype._parseHeader = function () {
  var hp = this._headerPosition
    , last = this._header.pop()
    , rem

  if (hp < 512) return this.emit("error", new Error(
    "Trying to parse header before finished"))

  // trim the final chunk to the 512-byte header boundary; `rem` holds
  // whatever body bytes arrived in the same chunk.
  if (hp > 512) {
    var ll = last.length
      , llIntend = 512 - hp + ll
    rem = last.slice(llIntend)
    last = last.slice(0, llIntend)
  }
  this._header.push(last)

  var fields = tar.fields
    , pos = 0
    , field = 0
    , fieldEnds = tar.fieldEnds
    , fieldSize = tar.fieldSize
    , set = {}
    , fpos = 0

  // one Buffer per field, keyed by field NAME via the translation table
  Object.keys(fieldSize).forEach(function (f) {
    set[ fields[f] ] = new Buffer(fieldSize[f])
  })

  // scatter the buffered header bytes into their per-field Buffers
  this._header.forEach(function (chunk) {
    for (var i = 0, l = chunk.length; i < l; i ++, pos ++, fpos ++) {
      if (pos >= fieldEnds[field]) {
        field ++
        fpos = 0
      }
      // header is null-padded, so when the fields run out,
      // just finish.
      if (null === fields[field]) return
      set[fields[field]][fpos] = chunk[i]
    }
  })

  this._header.length = 0

  // type definitions here:
  // http://cdrecord.berlios.de/private/man/star/star.4.html
  var type = set.TYPE.toString()
    , file = this.currentFile = new File(set)
  if (type === "\0" ||
      type >= "0" && type <= "7") {
    // a real entry: apply any accumulated pax/long-field metadata
    this._addExtended(file)
    this.emit("file", file)
  } else if (type === "g") {
    // global extended header: k=v pairs for the rest of the archive
    this._global = this._global || {}
    readPax(this, file, this._global)
  } else if (type === "h" || type === "x" || type === "X") {
    // extended header for the next entry only
    // NOTE(review): "h" is not a pax header type in the star docs
    // referenced above — confirm it belongs in this branch.
    this._extended = this._extended || {}
    readPax(this, file, this._extended)
  } else if (type === "K") {
    // GNU long linkname: body is the link target of the next entry
    this._readLongField(file, "linkname")
  } else if (type === "L") {
    // GNU long name: body is the path of the next entry
    this._readLongField(file, "name")
  }

  this.state = BODY
  if (rem) return this.write(rem)
  return true
}
+
+function readPax (self, file, obj) {
+ var buf = ""
+ file.on("data", function (c) {
+ buf += c
+ var lines = buf.split(/\r?\n/)
+ buf = lines.pop()
+ lines.forEach(function (line) {
+ line = line.match(/^[0-9]+ ([^=]+)=(.*)/)
+ if (!line) return
+ obj[line[1]] = line[2]
+ })
+ })
+}
+
// Accumulate the body of a GNU "long name"/"long link" meta entry into
// _longFields[field]; _addExtended applies it to the next real entry.
Parser.prototype._readLongField = function (f, field) {
  var self = this
  // lazily create the accumulator: the constructor never initialized
  // _longFields, so the first long-field entry used to throw.
  this._longFields = this._longFields || {}
  this._longFields[field] = ""
  f.on("data", function (c) {
    self._longFields[field] += c
  })
}
+
// Apply accumulated global/extended pax records and GNU long fields to
// a freshly-parsed entry, then reset the per-entry accumulators.
Parser.prototype._addExtended = function (file) {
  var g = this._global || {}
    , e = this._extended || {}
  file.extended = {}
  ;[g, e].forEach(function (h) {
    Object.keys(h).forEach(function (k) {
      file.extended[k] = h[k]
      // handle known fields
      switch (k) {
        case "path": file.name = h[k]; break
        case "ctime": file.ctime = new Date(1000 * h[k]); break
        case "mtime": file.mtime = new Date(1000 * h[k]); break
        case "gid": file.gid = parseInt(h[k], 10); break
        case "uid": file.uid = parseInt(h[k], 10); break
        case "charset": file.charset = h[k]; break
        case "gname": file.group = h[k]; break
        case "uname": file.user = h[k]; break
        case "linkpath": file.linkname = h[k]; break
        case "size": file.size = parseInt(h[k], 10); break
        case "SCHILY.devmajor":
          // file.dev only exists for ustar entries; create on demand
          // (the original threw a TypeError here for non-ustar files)
          file.dev = file.dev || {}
          file.dev.major = parseInt(h[k], 10)
          break
        case "SCHILY.devminor":
          file.dev = file.dev || {}
          file.dev.minor = parseInt(h[k], 10)
          break
      }
    })
  })
  var lf = this._longFields || {}
  Object.keys(lf).forEach(function (f) {
    file[f] = lf[f]
  })
  this._extended = {}
  this._longFields = {}
}
+
// Finish the entry currently being streamed: reset the header/body
// offsets for the next entry, end the file stream, and switch to
// skipping inter-entry padding.
Parser.prototype._closeFile = function () {
  if (!this.currentFile) {
    return this.emit("error", new Error(
      "Trying to close without current file"))
  }

  this._headerPosition = 0
  this._bodyPosition = 0
  this.currentFile.end()
  this.currentFile = null
  this.state = PAD
}
+
+
+// file stuff
+
// Decode a text header field: ascii bytes up to the first \0.
function strF (f) {
  var s = f.toString("ascii")
  var end = s.indexOf("\0")
  if (end !== -1) s = s.slice(0, end)
  return s || ""
}
+
// Parse a GNU base-256 numeric field, the inverse of the generator's
// toBase256: first byte 0x80 means non-negative (big-endian payload in
// bytes 1..len-1), 0xFF means negative (payload is two's complement).
// Fixes vs original: each byte's bit string was padded with a NEGATIVE
// Array length (RangeError) using "1"s instead of leading "0"s, and
// the positive sum used powers 256^(l-1)..256^1, one factor of 256 too
// high for the layout its own generator writes.
function parse256 (buf) {
  var positive
  if (buf[0] === 0x80) positive = true
  else if (buf[0] === 0xFF) positive = false
  else return 0

  if (!positive) {
    // this is rare enough that the string slowness
    // is not a big deal. You need *very* old files
    // to ever hit this path.
    var s = ""
    for (var i = 1, l = buf.length; i < l; i ++) {
      var byte = buf[i].toString(2)
      if (byte.length < 8) {
        // left-pad each byte to 8 bits with zeros
        byte = new Array(8 - byte.length + 1).join("0") + byte
      }
      s += byte
    }
    // undo the two's complement: flip every bit left of the lowest 1
    var ht = s.match(/^([01]*)(10*)$/)
      , head = ht[1]
      , tail = ht[2]
    head = head.split("1").join("2")
               .split("0").join("1")
               .split("2").join("0")
    return -1 * parseInt(head + tail, 2)
  }

  // big-endian payload: buf[1] is the most significant byte
  var sum = 0
  for (var i = 1, l = buf.length, p = l - 2; i < l; i ++, p--) {
    sum += buf[i] * Math.pow(256, p)
  }
  return sum
}
+
// Parse a numeric header field: base-256 binary when the high bit of
// the first byte is set, otherwise null/space-padded octal text.
// Fix: `f[0] & 128 === 128` — `===` binds tighter than `&`, so the
// original actually tested the LOW bit (f[0] & 1) and misrouted any
// field starting with an odd ascii byte (e.g. "1") into parse256.
function nF (f) {
  if ((f[0] & 0x80) === 0x80) {
    return parse256(f)
  }
  return parseInt(f.toString("ascii").replace(/\0+/g, "").trim(), 8) || 0
}
+
// Byte-wise equality of two buffers (length must match too).
function bufferMatch (a, b) {
  if (a.length !== b.length) return false
  for (var i = a.length - 1; i >= 0; i --) {
    if (a[i] !== b[i]) return false
  }
  return true
}
+
// One parsed tar entry.  `fields` is the map of raw per-field Buffers
// built by Parser._parseHeader; numeric fields are decoded with nF
// (octal or base-256) and text fields with strF (ascii up to \0).
function File (fields) {
  this._raw = fields
  this.name = strF(fields.NAME)
  this.mode = nF(fields.MODE)
  this.uid = nF(fields.UID)
  this.gid = nF(fields.GID)
  this.size = nF(fields.SIZE)
  // mtime is stored in seconds; Date wants milliseconds
  this.mtime = new Date(nF(fields.MTIME) * 1000)
  this.cksum = nF(fields.CKSUM)
  this.type = strF(fields.TYPE)
  this.linkname = strF(fields.LINKNAME)

  // ustar extension fields are only meaningful when the magic matches
  this.ustar = bufferMatch(fields.USTAR, tar.ustar)

  if (this.ustar) {
    this.ustarVersion = nF(fields.USTARVER)
    this.user = strF(fields.UNAME)
    this.group = strF(fields.GNAME)
    this.dev = { major: nF(fields.DEVMAJ)
               , minor: nF(fields.DEVMIN) }
    this.prefix = strF(fields.PREFIX)
    if (this.prefix) {
      // the prefix field holds the leading path components
      this.name = this.prefix + "/" + this.name
    }
  }

  this.writable = true
  this.readable = true
  Stream.apply(this)
}
+
File.prototype = Object.create(Stream.prototype)

// tar `type` flag values, keyed by friendly name.
File.types = { File: "0"
             , HardLink: "1"
             , SymbolicLink: "2"
             , CharacterDevice: "3"
             , BlockDevice: "4"
             , Directory: "5"
             , FIFO: "6"
             , ContiguousFile: "7" }

// generate File#isSymbolicLink() etc. for each type.
Object.keys(File.types).forEach(function (t) {
  File.prototype["is"+t] = function () {
    return File.types[t] === this.type
  }
  // NOTE(review): this maps each flag value to itself ("0" -> "0");
  // a reverse map to the name, as done in tar.js
  // (`types[types[t]] = types[types[t]] || t`), was probably intended.
  File.types[ File.types[t] ] = File.types[t]
})
+
// contiguous files are treated as regular files for most purposes.
// (these override the generated is-type helpers above)
File.prototype.isFile = function () {
  if (this.type === "7") return true
  return this.type === "0" && this.name.slice(-1) !== "/"
}

// a type-0 entry whose name ends in "/" is an old-style directory.
File.prototype.isDirectory = function () {
  if (this.type === "5") return true
  return this.type === "0" && this.name.slice(-1) === "/"
}
+
// Minimal writable/readable plumbing: a File simply re-emits what the
// Parser writes into it.
File.prototype.write = function (c) {
  this.emit("data", c)
  return true
}

File.prototype.end = function (c) {
  if (c) this.write(c)
  this.emit("end")
  this.emit("close")
}

// pause/resume are advisory: they only notify listeners; no buffering
// or throttling happens here.
File.prototype.pause = function () { this.emit("pause") }

File.prototype.resume = function () { this.emit("resume") }
diff --git a/deps/npm/node_modules/tar/old/tar.js b/deps/npm/node_modules/tar/old/tar.js
new file mode 100644
index 0000000000..f70c081d27
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/tar.js
@@ -0,0 +1,74 @@
// Header field layout tables shared by the old parser and generator.
// field names that every tar file must have.
// header is padded to 512 bytes.
var f = 0
  , fields = {}
  , NAME = fields.NAME = f++
  , MODE = fields.MODE = f++
  , UID = fields.UID = f++
  , GID = fields.GID = f++
  , SIZE = fields.SIZE = f++
  , MTIME = fields.MTIME = f++
  , CKSUM = fields.CKSUM = f++
  , TYPE = fields.TYPE = f++
  , LINKNAME = fields.LINKNAME = f++
  , headerSize = 512
  , fieldSize = []

fieldSize[NAME] = 100
fieldSize[MODE] = 8
fieldSize[UID] = 8
fieldSize[GID] = 8
fieldSize[SIZE] = 12
fieldSize[MTIME] = 12
fieldSize[CKSUM] = 8
fieldSize[TYPE] = 1
fieldSize[LINKNAME] = 100

// "ustar\0" may introduce another bunch of headers.
// these are optional, and will be nulled out if not present.
var ustar = new Buffer(6)
ustar.asciiWrite("ustar\0")

var USTAR = fields.USTAR = f++
  , USTARVER = fields.USTARVER = f++
  , UNAME = fields.UNAME = f++
  , GNAME = fields.GNAME = f++
  , DEVMAJ = fields.DEVMAJ = f++
  , DEVMIN = fields.DEVMIN = f++
  , PREFIX = fields.PREFIX = f++
// terminate fields.
fields[f] = null

fieldSize[USTAR] = 6
fieldSize[USTARVER] = 2
fieldSize[UNAME] = 32
fieldSize[GNAME] = 32
fieldSize[DEVMAJ] = 8
fieldSize[DEVMIN] = 8
fieldSize[PREFIX] = 155

// byte offset and end of each field within the 512-byte header
var fieldEnds = {}
  , fieldOffs = {}
  , fe = 0
for (var i = 0; i < f; i ++) {
  fieldOffs[i] = fe
  fieldEnds[i] = (fe += fieldSize[i])
}

// build a translation table of field names (index -> name), skipping
// the null terminator entry (the original also created a bogus
// fields[null] key from it; the newer tar.js guards the same way).
Object.keys(fields).forEach(function (f) {
  if (fields[f] !== null) fields[fields[f]] = f
})

exports.ustar = ustar
exports.fields = fields
exports.fieldSize = fieldSize
exports.fieldOffs = fieldOffs
exports.fieldEnds = fieldEnds
exports.headerSize = headerSize

var Parser = exports.Parser = require("./parser")
exports.createParser = Parser.create

var Generator = exports.Generator = require("./generator")
exports.createGenerator = Generator.create
diff --git a/deps/npm/node_modules/tar/old/test/test-generator.js b/deps/npm/node_modules/tar/old/test/test-generator.js
new file mode 100644
index 0000000000..dea2732161
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/test/test-generator.js
@@ -0,0 +1,13 @@
// pipe this file to tar vt
// Smoke test: streams one fixture file through the Generator and
// writes the resulting archive to stdout for `tar vt` to inspect.

var Generator = require("../generator")
  , fs = require("fs")
  , path = require("path")
  , ohm = fs.createReadStream(path.resolve(__dirname, "tar-files/Ω.txt"))
  , foo = path.resolve(__dirname, "tar-files/foo.js")
  , gen = Generator.create({cwd: __dirname})

gen.pipe(process.stdout)
gen.append(ohm)
//gen.append(foo)
gen.end()
diff --git a/deps/npm/node_modules/tar/old/test/test-generator.tar b/deps/npm/node_modules/tar/old/test/test-generator.tar
new file mode 100644
index 0000000000..6752aa51de
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/test/test-generator.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/old/test/test-generator.txt b/deps/npm/node_modules/tar/old/test/test-generator.txt
new file mode 100644
index 0000000000..349757ba69
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/test/test-generator.txt
Binary files differ
diff --git a/deps/npm/node_modules/tar/old/test/test-parser.js b/deps/npm/node_modules/tar/old/test/test-parser.js
new file mode 100644
index 0000000000..ffc87a13f4
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/test/test-parser.js
@@ -0,0 +1,28 @@
// Smoke test: parse the fixture archive and dump every entry's raw
// header fields and body to the console for manual inspection.
var p = require("../tar").createParser()
  , fs = require("fs")
  , tar = require("../tar")

p.on("file", function (file) {
  console.error("file start", file.name, file.size, file.extended)
  console.error(file)
  // dump each raw header field with trailing nulls stripped
  Object.keys(file._raw).forEach(function (f) {
    console.log(f, file._raw[f].toString().replace(/\0+$/, ""))
  })
  file.on("data", function (c) {
    console.error("data", c.toString().replace(/\0+$/, ""))
  })
  file.on("end", function () {
    console.error("end", file.name)
  })
})


var s = fs.createReadStream(__dirname + "/test-generator.tar")
s.on("data", function (c) {
  console.error("stream data", c.toString())
})
s.on("end", function () { console.error("stream end") })
s.on("close", function () { console.error("stream close") })
p.on("end", function () { console.error("parser end") })

s.pipe(p)
diff --git a/deps/npm/node_modules/tar/old/test/test-tar.tar b/deps/npm/node_modules/tar/old/test/test-tar.tar
new file mode 100644
index 0000000000..e4fab49d53
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/test/test-tar.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/old/test/test-tar.txt b/deps/npm/node_modules/tar/old/test/test-tar.txt
new file mode 100644
index 0000000000..d9ac62cc58
--- /dev/null
+++ b/deps/npm/node_modules/tar/old/test/test-tar.txt
Binary files differ
diff --git a/deps/npm/node_modules/tar/package.json b/deps/npm/node_modules/tar/package.json
new file mode 100644
index 0000000000..187cd83981
--- /dev/null
+++ b/deps/npm/node_modules/tar/package.json
@@ -0,0 +1,26 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
+ "name": "tar",
+ "description": "tar for node",
+ "version": "0.1.0",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-tar.git"
+ },
+ "main": "tar.js",
+ "scripts": {
+ "test": "rm -rf test/tmp; tap test/*.js"
+ },
+ "engines": {
+ "node": "~0.5.9 || 0.6 || 0.7 || 0.8"
+ },
+ "dependencies": {
+ "inherits": "1.x",
+ "block-stream": "*",
+ "fstream": "~0.1"
+ },
+ "devDependencies": {
+ "tap": "0.x",
+ "rimraf": "1.x"
+ }
+}
diff --git a/deps/npm/node_modules/tar/tar.js b/deps/npm/node_modules/tar/tar.js
new file mode 100644
index 0000000000..b9dbca489c
--- /dev/null
+++ b/deps/npm/node_modules/tar/tar.js
@@ -0,0 +1,172 @@
// Header layout tables and type/mode constants shared by lib/pack.js,
// lib/parse.js, and lib/extract.js.
// field paths that every tar file must have.
// header is padded to 512 bytes.
var f = 0
  , fields = {}
  , path = fields.path = f++
  , mode = fields.mode = f++
  , uid = fields.uid = f++
  , gid = fields.gid = f++
  , size = fields.size = f++
  , mtime = fields.mtime = f++
  , cksum = fields.cksum = f++
  , type = fields.type = f++
  , linkpath = fields.linkpath = f++
  , headerSize = 512
  , blockSize = 512
  , fieldSize = []

fieldSize[path] = 100
fieldSize[mode] = 8
fieldSize[uid] = 8
fieldSize[gid] = 8
fieldSize[size] = 12
fieldSize[mtime] = 12
fieldSize[cksum] = 8
fieldSize[type] = 1
fieldSize[linkpath] = 100

// "ustar\0" may introduce another bunch of headers.
// these are optional, and will be nulled out if not present.

var ustar = fields.ustar = f++
  , ustarver = fields.ustarver = f++
  , uname = fields.uname = f++
  , gname = fields.gname = f++
  , devmaj = fields.devmaj = f++
  , devmin = fields.devmin = f++
  , prefix = fields.prefix = f++
  , fill = fields.fill = f++

// terminate fields.
fields[f] = null

fieldSize[ustar] = 6
fieldSize[ustarver] = 2
fieldSize[uname] = 32
fieldSize[gname] = 32
fieldSize[devmaj] = 8
fieldSize[devmin] = 8
fieldSize[prefix] = 155
fieldSize[fill] = 12

// nb: prefix field may in fact be 130 bytes of prefix,
// a null char, 12 bytes for atime, 12 bytes for ctime.
//
// To recognize this format:
// 1. prefix[130] === ' ' or '\0'
// 2. atime and ctime are octal numeric values
// 3. atime and ctime have ' ' in their last byte

// byte offset and end of each field within the 512-byte header
var fieldEnds = {}
  , fieldOffs = {}
  , fe = 0
for (var i = 0; i < f; i ++) {
  fieldOffs[i] = fe
  fieldEnds[i] = (fe += fieldSize[i])
}

// build a translation table of field paths (index -> name), skipping
// the null terminator entry.
Object.keys(fields).forEach(function (f) {
  if (fields[f] !== null) fields[fields[f]] = f
})

// different values of the 'type' field
// paths match the values of Stats.isX() functions, where appropriate
var types =
  { 0: "File"
  , "\0": "OldFile" // like 0
  , 1: "Link"
  , 2: "SymbolicLink"
  , 3: "CharacterDevice"
  , 4: "BlockDevice"
  , 5: "Directory"
  , 6: "FIFO"
  , 7: "ContiguousFile" // like 0
  // posix headers
  , g: "GlobalExtendedHeader" // k=v for the rest of the archive
  , x: "ExtendedHeader" // k=v for the next file
  // vendor-specific stuff
  , A: "SolarisACL" // skip
  , D: "GNUDumpDir" // like 5, but with data, which should be skipped
  , I: "Inode" // metadata only, skip
  , K: "NextFileHasLongLinkpath" // data = link path of next file
  , L: "NextFileHasLongPath" // data = path of next file
  , M: "ContinuationFile" // skip
  , N: "OldGnuLongPath" // like L
  , S: "SparseFile" // skip
  , V: "TapeVolumeHeader" // skip
  , X: "OldExtendedHeader" // like x
  }

// make the table bidirectional (flag -> name as well as name -> flag)
Object.keys(types).forEach(function (t) {
  types[types[t]] = types[types[t]] || t
})

// values for the mode field
var modes =
  { suid: 04000 // set uid on extraction
  , sgid: 02000 // set gid on extraction
  , svtx: 01000 // set restricted deletion flag on dirs on extraction
  , uread: 0400
  , uwrite: 0200
  , uexec: 0100
  , gread: 040
  , gwrite: 020
  , gexec: 010
  , oread: 4
  , owrite: 2
  , oexec: 1
  , all: 07777
  }

// header/pax fields whose values are numbers rather than text
var numeric =
  { mode: true
  , uid: true
  , gid: true
  , size: true
  , mtime: true
  , devmaj: true
  , devmin: true
  , cksum: true
  , atime: true
  , ctime: true
  , dev: true
  , ino: true
  , nlink: true
  }

// make the modes table bidirectional as well
Object.keys(modes).forEach(function (t) {
  modes[modes[t]] = modes[modes[t]] || t
})

// pax extended-header keywords defined by posix
var knownExtended =
  { atime: true
  , charset: true
  , comment: true
  , ctime: true
  , gid: true
  , gname: true
  , linkpath: true
  , mtime: true
  , path: true
  , realtime: true
  , security: true
  , size: true
  , uid: true
  , uname: true }


exports.fields = fields
exports.fieldSize = fieldSize
exports.fieldOffs = fieldOffs
exports.fieldEnds = fieldEnds
exports.types = types
exports.modes = modes
exports.numeric = numeric
exports.headerSize = headerSize
exports.blockSize = blockSize
exports.knownExtended = knownExtended

exports.Pack = require("./lib/pack.js")
exports.Parse = require("./lib/parse.js")
exports.Extract = require("./lib/extract.js")
diff --git a/deps/npm/node_modules/tar/test/extract.js b/deps/npm/node_modules/tar/test/extract.js
new file mode 100644
index 0000000000..e2dea5c053
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/extract.js
@@ -0,0 +1,406 @@
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , fs = require("fs")
+ , path = require("path")
+ , file = path.resolve(__dirname, "fixtures/c.tar")
+ , target = path.resolve(__dirname, "tmp/extract-test")
+ , index = 0
+ , fstream = require("fstream")
+
+ , ee = 0
+ , expectEntries =
+[ { path: 'c.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 513,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'cc.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 513,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 100,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'Ω.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 2,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: 'Ω.txt',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 2,
+ linkpath: '',
+ nlink: 1,
+ dev: 234881026,
+ ino: 51693379 },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 200,
+ linkpath: '',
+ nlink: 1,
+ dev: 234881026,
+ ino: 51681874 },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 201,
+ linkpath: '',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
+ mode: '777',
+ type: '2',
+ depth: undefined,
+ size: 0,
+ linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ nlink: undefined,
+ dev: undefined,
+ ino: undefined },
+ { path: '200-hard',
+ mode: '644',
+ type: '0',
+ depth: undefined,
+ size: 200,
+ linkpath: '',
+ nlink: 2,
+ dev: 234881026,
+ ino: 51681874 },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '644',
+ type: '1',
+ depth: undefined,
+ size: 0,
+ linkpath: path.resolve(target, '200-hard'),
+ nlink: 2,
+ dev: 234881026,
+ ino: 51681874 } ]
+
+ , ef = 0
+ , expectFiles =
+[ { path: '',
+ mode: '40755',
+ type: 'Directory',
+ depth: 0,
+ size: 306,
+ linkpath: undefined,
+ nlink: 9 },
+ { path: '/200-hard',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 200,
+ linkpath: undefined,
+ nlink: 2 },
+ { path: '/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '100644',
+ type: 'Link',
+ depth: 1,
+ size: 200,
+ linkpath: '/Users/isaacs/dev-src/js/node-tar/test/tmp/extract-test/200-hard',
+ nlink: 2 },
+ { path: '/200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
+ mode: '120777',
+ type: 'SymbolicLink',
+ depth: 1,
+ size: 200,
+ linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ nlink: 1 },
+ { path: '/c.txt',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 513,
+ linkpath: undefined,
+ nlink: 1 },
+ { path: '/cc.txt',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 513,
+ linkpath: undefined,
+ nlink: 1 },
+ { path: '/r',
+ mode: '40755',
+ type: 'Directory',
+ depth: 1,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 2,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a',
+ mode: '40755',
+ type: 'Directory',
+ depth: 3,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l',
+ mode: '40755',
+ type: 'Directory',
+ depth: 4,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l',
+ mode: '40755',
+ type: 'Directory',
+ depth: 5,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y',
+ mode: '40755',
+ type: 'Directory',
+ depth: 6,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-',
+ mode: '40755',
+ type: 'Directory',
+ depth: 7,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d',
+ mode: '40755',
+ type: 'Directory',
+ depth: 8,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 9,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 10,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p',
+ mode: '40755',
+ type: 'Directory',
+ depth: 11,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-',
+ mode: '40755',
+ type: 'Directory',
+ depth: 12,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f',
+ mode: '40755',
+ type: 'Directory',
+ depth: 13,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o',
+ mode: '40755',
+ type: 'Directory',
+ depth: 14,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l',
+ mode: '40755',
+ type: 'Directory',
+ depth: 15,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d',
+ mode: '40755',
+ type: 'Directory',
+ depth: 16,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e',
+ mode: '40755',
+ type: 'Directory',
+ depth: 17,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r',
+ mode: '40755',
+ type: 'Directory',
+ depth: 18,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-',
+ mode: '40755',
+ type: 'Directory',
+ depth: 19,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p',
+ mode: '40755',
+ type: 'Directory',
+ depth: 20,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a',
+ mode: '40755',
+ type: 'Directory',
+ depth: 21,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t',
+ mode: '40755',
+ type: 'Directory',
+ depth: 22,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h',
+ mode: '40755',
+ type: 'Directory',
+ depth: 23,
+ size: 102,
+ linkpath: undefined,
+ nlink: 3 },
+ { path: '/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: '100644',
+ type: 'File',
+ depth: 24,
+ size: 100,
+ linkpath: undefined,
+ nlink: 1 },
+ { path: '/Ω.txt',
+ mode: '100644',
+ type: 'File',
+ depth: 1,
+ size: 2,
+ linkpath: undefined,
+ nlink: 1 } ]
+
+
+
+// The extract class basically just pipes the input
+// to a Reader, and then to a fstream.DirWriter
+
+// So, this is as much a test of fstream.Reader and fstream.Writer
+// as it is of tar.Extract, but it sort of makes sense.
+
+tap.test("extract test", function (t) {
+ var extract = tar.Extract(target)
+ var inp = fs.createReadStream(file)
+
+ // give it a weird buffer size to try to break in odd places
+ inp.bufferSize = 1234
+
+ inp.pipe(extract)
+
+ extract.on("end", function () {
+ t.equal(ee, expectEntries.length, "should see "+ee+" entries")
+
+ // should get no more entries after end
+ extract.removeAllListeners("entry")
+ extract.on("entry", function (e) {
+ t.fail("Should not get entries after end!")
+ })
+
+ next()
+ })
+
+ extract.on("entry", function (entry) {
+ var found =
+ { path: entry.path
+ , mode: entry.props.mode.toString(8)
+ , type: entry.props.type
+ , depth: entry.props.depth
+ , size: entry.props.size
+ , linkpath: entry.props.linkpath
+ , nlink: entry.props.nlink
+ , dev: entry.props.dev
+ , ino: entry.props.ino
+ }
+
+ var wanted = expectEntries[ee ++]
+
+ t.equivalent(found, wanted, "tar entry " + ee + " " + wanted.path)
+ })
+
+ function next () {
+ var r = fstream.Reader({ path: target
+ , type: "Directory"
+ // this is just to encourage consistency
+ , sort: "alpha" })
+
+ r.on("ready", function () {
+ foundEntry(r)
+ })
+
+ r.on("end", finish)
+
+ function foundEntry (entry) {
+ var p = entry.path.substr(target.length)
+ var found =
+ { path: p
+ , mode: entry.props.mode.toString(8)
+ , type: entry.props.type
+ , depth: entry.props.depth
+ , size: entry.props.size
+ , linkpath: entry.props.linkpath
+ , nlink: entry.props.nlink
+ }
+
+ var wanted = expectFiles[ef ++]
+
+ t.equivalent(found, wanted, "unpacked file " + ef + " " + wanted.path)
+
+ entry.on("entry", foundEntry)
+ }
+
+ function finish () {
+ t.equal(ef, expectFiles.length, "should have "+ef+" items")
+ t.end()
+ }
+ }
+})
diff --git a/deps/npm/node_modules/tar/test/fixtures/200.tar b/deps/npm/node_modules/tar/test/fixtures/200.tar
new file mode 100644
index 0000000000..7e3a8f3e77
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/200.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/200L.hex b/deps/npm/node_modules/tar/test/fixtures/200L.hex
new file mode 100644
index 0000000000..106fce3340
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/200L.hex
@@ -0,0 +1,50 @@
+# longpath header
+2e2f2e2f404c6f6e674c696e6b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303030303030003030303030303000303030303030300030303030303030303331310030303030303030303030300030313135363000204c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ././@LongLink.......................................................................................0000000.0000000.0000000.00000000311.00000000000.011560..L...................................................................................................
+007573746172202000726f6f7400000000000000000000000000000000000000000000000000000000726f6f7400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar...root............................root...................................................................................................................................................................................................................
+
+# longpath data
+32303063636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363630000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc........................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+# longpath file - note truncated path
+32303063636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363303030303634340030303031373530003030303137353000303030303030303033313100313136353233353435373600303333343036002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc0000644.0001750.0001750.00000000311.11652354576.033406..0...................................................................................................
+00757374617220200069736161637300000000000000000000000000000000000000000000000000006973616163730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar...isaacs..........................isaacs.................................................................................................................................................................................................................
+
+# longpath file contents
+32303063636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363630a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc........................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+# tar eof
+0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
diff --git a/deps/npm/node_modules/tar/test/fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc b/deps/npm/node_modules/tar/test/fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
new file mode 100644
index 0000000000..c2b6e5096d
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
@@ -0,0 +1 @@
+200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/200longlink.tar b/deps/npm/node_modules/tar/test/fixtures/200longlink.tar
new file mode 100644
index 0000000000..bff94eef8e
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/200longlink.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/200longname.tar b/deps/npm/node_modules/tar/test/fixtures/200longname.tar
new file mode 100644
index 0000000000..5556567e15
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/200longname.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/a.hex b/deps/npm/node_modules/tar/test/fixtures/a.hex
new file mode 100644
index 0000000000..529e9cbb36
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/a.hex
@@ -0,0 +1,14 @@
+-- header --
+612e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303034303120313136353133363033333320303132343531002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 a.txt...............................................................................................000644..057761..000024..00000000401.11651360333.012451..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- file contents --
+61616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161616161 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa
+61000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 a...............................................................................................................................................................................................................................................................
+
+-- tar eof --
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
diff --git a/deps/npm/node_modules/tar/test/fixtures/a.tar b/deps/npm/node_modules/tar/test/fixtures/a.tar
new file mode 100644
index 0000000000..27604d7ec3
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/a.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/a.txt b/deps/npm/node_modules/tar/test/fixtures/a.txt
new file mode 100644
index 0000000000..a6c406965f
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/a.txt
@@ -0,0 +1 @@
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/b.hex b/deps/npm/node_modules/tar/test/fixtures/b.hex
new file mode 100644
index 0000000000..cf36eb6bbb
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/b.hex
@@ -0,0 +1,14 @@
+-- normal header --
+622e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303130303020313136353133363036373720303132343631002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 b.txt...............................................................................................000644..057761..000024..00000001000.11651360677.012461..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- file contents - exactly 512 bytes, no null padding --
+62626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262 bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+62626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262626262 bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb
+
+-- tar eof blocks --
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
diff --git a/deps/npm/node_modules/tar/test/fixtures/b.tar b/deps/npm/node_modules/tar/test/fixtures/b.tar
new file mode 100644
index 0000000000..2d8e7b3ac7
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/b.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/b.txt b/deps/npm/node_modules/tar/test/fixtures/b.txt
new file mode 100644
index 0000000000..d6d6f17b3b
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/b.txt
@@ -0,0 +1 @@
+bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/c.hex b/deps/npm/node_modules/tar/test/fixtures/c.hex
new file mode 100644
index 0000000000..2391bd2fd4
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/c.hex
@@ -0,0 +1,74 @@
+-- c.txt header
+632e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303130303120313136353136353730343220303132343536002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 c.txt...............................................................................................000644..057761..000024..00000001001.11651657042.012456..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- c.txt data
+63636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363 cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
+63636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363 cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
+0a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- cc.txt header
+63632e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303130303120313136353136353730343620303132363235002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 cc.txt..............................................................................................000644..057761..000024..00000001001.11651657046.012625..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- cc.txt data
+63636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363 cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
+63636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363 cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
+0a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccc.... header
+63636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363303030363434200030353737363120003030303032342000303030303030303031343420313136353231353135333320303433333134002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc000644..057761..000024..00000000144.11652151533.043314..0...................................................................................................
+0075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000..r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h..........................................................................................................................
+
+-- r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/ccc.... contents
+63636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc............................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- omega header, no pax
+cea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303030303220313136353233313530363520303133303737002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 Ω.txt..............................................................................................000644..057761..000024..00000000002.11652315065.013077..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- omega contents
+cea90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 Ω..............................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- paxheader for omega
+5061784865616465722fcea92e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303031373020313136353233313530363520303135303536002078000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 PaxHeader/Ω.txt....................................................................................000644..057761..000024..00000000170.11652315065.015056..x...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- paxheader content
+313520706174683dcea92e7478740a3230206374696d653d313331393733373930390a3230206174696d653d313331393733393036310a323420534348494c592e6465763d3233343838313032360a323320534348494c592e696e6f3d35313639333337390a313820534348494c592e6e6c696e6b3d310a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 15.path=Ω.txt.20.ctime=1319737909.20.atime=1319739061.24.SCHILY.dev=234881026.23.SCHILY.ino=51693379.18.SCHILY.nlink=1.........................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- header for omega
+cea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303030303220313136353233313530363520303133303737002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 Ω.txt..............................................................................................000644..057761..000024..00000000002.11652315065.013077..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- omega data
+cea90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 Ω..............................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- paxheader for 200char filename
+5061784865616465722f323030636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363630000303030363434200030353737363120003030303032342000303030303030303035343120313136353231353133323420303334323330002078000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc..000644..057761..000024..00000000541.11652151324.034230..x...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- paxheader content
+32313020706174683d32303063636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363630a3230206374696d653d313331393638363836380a3230206174696d653d313331393734313235340a3338204c4942 210.path=200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc.20.ctime=1319686868.20.atime=1319741254.38.LIB
+415243484956452e6372656174696f6e74696d653d313331393638363835320a323420534348494c592e6465763d3233343838313032360a323320534348494c592e696e6f3d35313638313837340a313820534348494c592e6e6c696e6b3d310a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ARCHIVE.creationtime=1319686852.24.SCHILY.dev=234881026.23.SCHILY.ino=51681874.18.SCHILY.nlink=1................................................................................................................................................................
+
+-- header for 200char filename (note truncated path)
+32303063636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636300303030363434200030353737363120003030303032342000303030303030303033313020313136353231353133323420303334333532002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc.000644..057761..000024..00000000310.11652151324.034352..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- 200char data
+32303063636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363630000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc........................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- tar eof
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
diff --git a/deps/npm/node_modules/tar/test/fixtures/c.tar b/deps/npm/node_modules/tar/test/fixtures/c.tar
new file mode 100644
index 0000000000..f37a40a841
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/c.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/c.txt b/deps/npm/node_modules/tar/test/fixtures/c.txt
new file mode 100644
index 0000000000..943d92e5c9
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/c.txt
@@ -0,0 +1 @@
+cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
diff --git a/deps/npm/node_modules/tar/test/fixtures/cc.txt b/deps/npm/node_modules/tar/test/fixtures/cc.txt
new file mode 100644
index 0000000000..943d92e5c9
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/cc.txt
@@ -0,0 +1 @@
+cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
diff --git a/deps/npm/node_modules/tar/test/fixtures/foo.hex b/deps/npm/node_modules/tar/test/fixtures/foo.hex
new file mode 100644
index 0000000000..5a1b165f0a
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/foo.hex
@@ -0,0 +1,14 @@
+-- normal header --
+666f6f2e6a7300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303030303420313135343336373037343120303132363137002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 foo.js..............................................................................................000644..057761..000024..00000000004.11543670741.012617..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- file contents --
+6261720a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 bar.............................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- tar eof --
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
diff --git a/deps/npm/node_modules/tar/test/fixtures/foo.js b/deps/npm/node_modules/tar/test/fixtures/foo.js
new file mode 100644
index 0000000000..5716ca5987
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/foo.js
@@ -0,0 +1 @@
+bar
diff --git a/deps/npm/node_modules/tar/test/fixtures/foo.tar b/deps/npm/node_modules/tar/test/fixtures/foo.tar
new file mode 100644
index 0000000000..0f69f981b4
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/foo.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/hardlink-1 b/deps/npm/node_modules/tar/test/fixtures/hardlink-1
new file mode 100644
index 0000000000..c2b6e5096d
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/hardlink-1
@@ -0,0 +1 @@
+200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/hardlink-2 b/deps/npm/node_modules/tar/test/fixtures/hardlink-2
new file mode 100644
index 0000000000..c2b6e5096d
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/hardlink-2
@@ -0,0 +1 @@
+200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/omega.hex b/deps/npm/node_modules/tar/test/fixtures/omega.hex
new file mode 100644
index 0000000000..eef879682e
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/omega.hex
@@ -0,0 +1,22 @@
+-- pax header --
+5061784865616465722fcea92e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303031373020313135343337313036313120303135303531002078000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 PaxHeader/Ω.txt....................................................................................000644..057761..000024..00000000170.11543710611.015051..x...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- pax header contents --
+313520706174683dcea92e7478740a3230206374696d653d313330313435393237380a3230206174696d653d313330313431353738330a323420534348494c592e6465763d3233343838313032360a323320534348494c592e696e6f3d32333737323936360a313820534348494c592e6e6c696e6b3d310a00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 15.path=Ω.txt.20.ctime=1301459278.20.atime=1301415783.24.SCHILY.dev=234881026.23.SCHILY.ino=23772966.18.SCHILY.nlink=1.........................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- normal header --
+cea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000303030363434200030353737363120003030303032342000303030303030303030303220313135343337313036313120303133303732002030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 Ω.txt..............................................................................................000644..057761..000024..00000000002.11543710611.013072..0...................................................................................................
+00757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 .ustar.00isaacs..........................staff...........................000000..000000.........................................................................................................................................................................
+
+-- file contents --
+cea90000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 Ω..............................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
+-- tar eof marker --
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ................................................................................................................................................................................................................................................................
+
diff --git a/deps/npm/node_modules/tar/test/fixtures/omega.tar b/deps/npm/node_modules/tar/test/fixtures/omega.tar
new file mode 100644
index 0000000000..6590e58ce4
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/omega.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/omega.txt b/deps/npm/node_modules/tar/test/fixtures/omega.txt
new file mode 100644
index 0000000000..1ca042fff2
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/omega.txt
@@ -0,0 +1 @@
+Ω \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/omegapax.tar b/deps/npm/node_modules/tar/test/fixtures/omegapax.tar
new file mode 100644
index 0000000000..59bbd0860f
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/omegapax.tar
Binary files differ
diff --git a/deps/npm/node_modules/tar/test/fixtures/packtest/omega.txt b/deps/npm/node_modules/tar/test/fixtures/packtest/omega.txt
new file mode 100644
index 0000000000..1ca042fff2
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/packtest/omega.txt
@@ -0,0 +1 @@
+Ω \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/packtest/star.4.html b/deps/npm/node_modules/tar/test/fixtures/packtest/star.4.html
new file mode 100644
index 0000000000..b600d772f5
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/packtest/star.4.html
@@ -0,0 +1,1184 @@
+<html>
+<head>
+<title>Manpage for star.4</title>
+<META name="description" content="A html version of the manpage for star.4">
+<META name="keywords" content="star.4">
+</head>
+<body background=corrugated_xxxxxlight_metal.gif>
+<PRE>
+<!-- Manpage converted by man2html 3.0.1 -->
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+
+</PRE>
+<H2>NAME</H2><PRE>
+ star - tape archive file format
+
+
+</PRE>
+<H2>DESCRIPTION</H2><PRE>
+ <B>Tar</B> Archives are layered archives. The basic structure is
+ defined by the POSIX.1-1988 archive format and documented in
+ the <B>BASIC</B> <B>TAR</B> <B>HEADER</B> <B>DESCRIPTION</B> section below. The higher
+ level structure is defined by the POSIX.1-2001 extended
+ headers and documented in the <B>EXTENDED</B> <B>TAR</B> <B>(PAX)</B> <B>HEADER</B>
+ <B>STRUCTURE</B> section below. POSIX.1-2001 extended headers are
+ pseudo files that contain an unlimited number of extended
+ header keywords and associated values. The header keywords
+ are documented in the <B>EXTENDED</B> <B>TAR</B> <B>(PAX)</B> <B>HEADER</B> <B>KEYWORDS</B>
+ section below.
+
+
+</PRE>
+<H2>BASIC TAR HEADER DESCRIPTION</H2><PRE>
+ Physically, a POSIX.1-1988 <B>tar</B> archive consists of a series
+ of fixed sized blocks of TBLOCK (512) characters. It con-
+ tains a series of file entries terminated by a logical
+ end-of-archive marker, which consists of two blocks of 512
+ bytes of binary zeroes. Each file entry is represented by a
+ header block that describes the file followed by one or more
+ blocks with the content of the file. The length of each file
+ is rounded up to a multiple of 512 bytes.
+
+ A number of TBLOCK sizes blocks are grouped together to a
+ tape record for physical I/O operations. Each record of <I>n</I>
+ blocks is written with a single <B><A HREF="write.2.html">write(2)</A></B> operation. On mag-
+ netic tapes, this results in a single tape record.
+
+ The header block is defined in star.h as follows:
+ /*
+ * POSIX.1-1988 field size values and magic.
+ */
+ #define TBLOCK 512
+ #define NAMSIZ 100
+ #define PFXSIZ 155
+
+ #define TMODLEN 8
+ #define TUIDLEN 8
+ #define TGIDLEN 8
+ #define TSIZLEN 12
+ #define TMTMLEN 12
+ #define TCKSLEN 8
+
+ #define TMAGIC "ustar" /* ustar magic 6 chars + '\0' */
+ #define TMAGLEN 6 /* "ustar" including '\0' */
+ #define TVERSION "00"
+ #define TVERSLEN 2
+ #define TUNMLEN 32
+ #define TGNMLEN 32
+ #define TDEVLEN 8
+
+Joerg Schilling Last change: 05/10/19 1
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ /*
+ * POSIX.1-1988 typeflag values
+ */
+ #define REGTYPE '0' /* Regular File */
+ #define AREGTYPE '\0' /* Regular File (outdated) */
+ #define LNKTYPE '1' /* Hard Link */
+ #define SYMTYPE '2' /* Symbolic Link */
+ #define CHRTYPE '3' /* Character Special */
+ #define BLKTYPE '4' /* Block Special */
+ #define DIRTYPE '5' /* Directory */
+ #define FIFOTYPE '6' /* FIFO (named pipe) */
+ #define CONTTYPE '7' /* Contiguous File */
+
+ /*
+ * POSIX.1-2001 typeflag extensions.
+ * POSIX.1-2001 calls the extended USTAR format PAX although it is
+ * definitely derived from and based on USTAR. The reason may be that
+ * POSIX.1-2001 calls the tar program outdated and lists the
+ * pax program as the successor.
+ */
+ #define LF_GHDR 'g' /* POSIX.1-2001 global extended header */
+ #define LF_XHDR 'x' /* POSIX.1-2001 extended header */
+
+ See section <B>EXTENDED</B> <B>TAR</B> <B>(PAX)</B> <B>HEADER</B> <B>KEYWORDS</B> for more
+ information about the structure of a POSIX.1-2001 header.
+
+ /*
+ * star/gnu/Sun tar extensions:
+ *
+ * Note that the standards committee allows only capital A through
+ * capital Z for user-defined expansion. This means that defining
+ * something as, say '8' is a *bad* idea.
+ */
+
+ #define LF_ACL 'A' /* Solaris Access Control List */
+ #define LF_DUMPDIR 'D' /* GNU dump dir */
+ #define LF_EXTATTR 'E' /* Solaris Extended Attribute File */
+ #define LF_META 'I' /* Inode (metadata only) no file content */
+ #define LF_LONGLINK 'K' /* NEXT file has a long linkname */
+ #define LF_LONGNAME 'L' /* NEXT file has a long name */
+ #define LF_MULTIVOL 'M' /* Continuation file rest to be skipped */
+ #define LF_NAMES 'N' /* OLD GNU for names &gt; 100 characters*/
+ #define LF_SPARSE 'S' /* This is for sparse files */
+ #define LF_VOLHDR 'V' /* tape/volume header Ignore on extraction */
+ #define LF_VU_XHDR 'X' /* POSIX.1-2001 xtended (Sun VU version) */
+
+ /*
+ * Definitions for the t_mode field
+ */
+ #define TSUID 04000 /* Set UID on execution */
+ #define TSGID 02000 /* Set GID on execution */
+ #define TSVTX 01000 /* On directories, restricted deletion flag */
+
+Joerg Schilling Last change: 05/10/19 2
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ #define TUREAD 00400 /* Read by owner */
+ #define TUWRITE 00200 /* Write by owner special */
+ #define TUEXEC 00100 /* Execute/search by owner */
+ #define TGREAD 00040 /* Read by group */
+ #define TGWRITE 00020 /* Write by group */
+ #define TGEXEC 00010 /* Execute/search by group */
+ #define TOREAD 00004 /* Read by other */
+ #define TOWRITE 00002 /* Write by other */
+ #define TOEXEC 00001 /* Execute/search by other */
+
+ #define TALLMODES 07777 /* The low 12 bits */
+
+ /*
+ * This is the ustar (Posix 1003.1) header.
+ */
+ struct header {
+ char t_name[NAMSIZ]; /* 0 Filename */
+ char t_mode[8]; /* 100 Permissions */
+ char t_uid[8]; /* 108 Numerical User ID */
+ char t_gid[8]; /* 116 Numerical Group ID */
+ char t_size[12]; /* 124 Filesize */
+ char t_mtime[12]; /* 136 st_mtime */
+ char t_chksum[8]; /* 148 Checksum */
+ char t_typeflag; /* 156 Typ of File */
+ char t_linkname[NAMSIZ]; /* 157 Target of Links */
+ char t_magic[TMAGLEN]; /* 257 "ustar" */
+ char t_version[TVERSLEN]; /* 263 Version fixed to 00 */
+ char t_uname[TUNMLEN]; /* 265 User Name */
+ char t_gname[TGNMLEN]; /* 297 Group Name */
+ char t_devmajor[8]; /* 329 Major for devices */
+ char t_devminor[8]; /* 337 Minor for devices */
+ char t_prefix[PFXSIZ]; /* 345 Prefix for t_name */
+ /* 500 End */
+ char t_mfill[12]; /* 500 Filler up to 512 */
+ };
+
+ /*
+ * star header specific definitions
+ */
+ #define STMAGIC "tar" /* star magic */
+ #define STMAGLEN 4 /* "tar" including '\0' */
+
+ /*
+ * This is the new (post Posix 1003.1-1988) xstar header
+ * defined in 1994.
+ *
+ * t_prefix[130] is guaranteed to be ' ' to prevent ustar
+ * compliant implementations from failing.
+ * t_mfill &amp; t_xmagic need to be zero for a 100% ustar compliant
+ * implementation, so setting t_xmagic to
+ * "tar" should be avoided in the future.
+ *
+
+Joerg Schilling Last change: 05/10/19 3
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ * A different method to recognize this format is to verify that
+ * t_prefix[130] is equal to ' ' and
+ * t_atime[0]/t_ctime[0] is an octal number and
+ * t_atime[11] is equal to ' ' and
+ * t_ctime[11] is equal to ' '.
+ *
+ * Note that t_atime[11]/t_ctime[11] may be changed in future.
+ */
+ struct xstar_header {
+ char t_name[NAMSIZ]; /* 0 Filename */
+ char t_mode[8]; /* 100 Permissions */
+ char t_uid[8]; /* 108 Numerical User ID */
+ char t_gid[8]; /* 116 Numerical Group ID */
+ char t_size[12]; /* 124 Filesize */
+ char t_mtime[12]; /* 136 st_mtime */
+ char t_chksum[8]; /* 148 Checksum */
+ char t_typeflag; /* 156 Typ of File */
+ char t_linkname[NAMSIZ]; /* 157 Target of Links */
+ char t_magic[TMAGLEN]; /* 257 "ustar" */
+ char t_version[TVERSLEN]; /* 263 Version fixed to 00 */
+ char t_uname[TUNMLEN]; /* 265 User Name */
+ char t_gname[TGNMLEN]; /* 297 Group Name */
+ char t_devmajor[8]; /* 329 Major for devices */
+ char t_devminor[8]; /* 337 Minor for devices */
+ char t_prefix[131]; /* 345 Prefix for t_name */
+ char t_atime[12]; /* 476 st_atime */
+ char t_ctime[12]; /* 488 st_ctime */
+ char t_mfill[8]; /* 500 Filler up to star magic */
+ char t_xmagic[4]; /* 508 "tar" */
+ };
+
+ struct sparse {
+ char t_offset[12];
+ char t_numbytes[12];
+ };
+
+ #define SPARSE_EXT_HDR 21
+
+ struct xstar_ext_header {
+ struct sparse t_sp[21];
+ char t_isextended;
+ };
+
+ typedef union hblock {
+ char dummy[TBLOCK];
+ long ldummy[TBLOCK/sizeof (long)]; /* force long alignment */
+ struct header dbuf;
+ struct xstar_header xstar_dbuf;
+ struct xstar_ext_header xstar_ext_dbuf;
+ } TCB;
+
+Joerg Schilling Last change: 05/10/19 4
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ For maximum portability, all fields that contain character
+ strings should be limited to use the low 7 bits of a charac-
+ ter.
+
+ The <I>name</I>, <I>linkname</I> and <I>prefix</I> field contain character
+ strings. The strings are null terminated except when they
+ use the full space of 100 characters for the <I>name</I> or <I>link-</I>
+ <I>name</I> field or 155 characters for the <I>prefix</I> field.
+
+ If the <I>prefix</I> does not start with a null character, then
+ <I>prefix</I> and <I>name</I> need to be concatenated by using the <I>prefix</I>,
+ followed a slash character followed by the <I>name</I> field. If a
+ null character appears in <I>name</I> or <I>prefix</I> before the maximum
+ size is reached, the field in question is terminated. This
+ way file names up to 256 characters may be archived. The
+ <I>prefix</I> is not used together with the <I>linkname</I> field, so the
+ maximum length of a link name is 100 characters.
+
+ The fields <I>magic</I>, <I>uname</I> and <I>gname</I> contain null terminated
+ character strings.
+
+ The version field contains the string <B>"00"</B> without a trail-
+ ing zero. It cannot be set to different values as POSIX.1-
+ 1988 did not specify a way to handle different version
+ strings. The <I>typeflag</I> field contains a single character.
+
+ All numeric fields contain <I>size</I>-<I>1</I> leading zero-filled
+ numbers using octal digits. They are followed by one or
+ more space or null characters. All recent implementations
+ only use one space or null character at the end of a numeri-
+ cal field to get maximum space for the octal number. <B>Star</B>
+ always uses a space character as terminator. Numeric fields
+ with 8 characters may hold up to 7 octal digits (7777777)
+ which results is a maximum value of 2097151. Numeric fields
+ with 12 characters may hold up to 11 octal digits
+ (77777777777) which results is a maximum value of
+ 8589934591.
+
+ <I>Star</I> implements a vendor specific (and thus non-POSIX)
+ extension to put bigger numbers into the numeric fields.
+ This is done by using a <B>base</B> <B>256</B> coding. The top bit of the
+ first character in the appropriate 8 character or 12 charac-
+ ter field is set to flag non octal coding. If base 256 cod-
+ ing is in use, then all remaining characters are used to
+ code the number. This results in 7 base 256 digits in 8
+ character fields and in 11 base 256 digits in 12 character
+ fields. All base 256 numbers are two's complement numbers.
+ A base 256 number in a 8 character field may hold 56 bits, a
+ base 256 number in a 12 character field may hold 88 bits.
+ This may extended to 64 bits for 8 character fields and to
+ 95 bits for 12 character fields. For a negative number the
+ first character currently is set to a value of 255 (all 8
+
+Joerg Schilling Last change: 05/10/19 5
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ bits are set). The rightmost character in a 8 or 12 charac-
+ ter field contains the least significant base 256 number.
+ Recent GNU tar versions implement the same extension.
+
+ While the POSIX standard makes obvious that the fields <I>mode</I>,
+ <I>uid</I>, <I>gid</I>, <I>size</I>, <I>chksum</I>, <I>devmajor</I> and <I>devminor</I> should be
+ treated as unsigned numbers, there is no such definition for
+ the <I>time</I> field.
+
+ The mode field contains 12 bits holding permissions, see
+ above for the definitions for each of the permission bits.
+
+ The <I>uid</I> and <I>gid</I> fields contain the numerical user id of the
+ file.
+
+ The <I>size</I> field contains the size of the file in characters.
+ If the <I>tar</I> <I>header</I> is followed by file data, then the amount
+ of data that follows is computed by (<I>size</I> + <I>511</I>) / <I>512</I>.
+
+ The <I>mtime</I> field contains the number of seconds since Jan 1st
+ 1970 00:00 UTC as retrieved via <B><A HREF="stat.2.html">stat(2)</A></B> in <I>st</I>_<I>mtime</I>.
+
+ The <I>chksum</I> field contains a simple checksum over all bytes
+ of the header. To compute the value, all characters in the
+ header are treated as unsigned integers and the characters
+ in the <I>chksum</I> field are treated as if they were all spaces.
+ When the computation starts, the checksum value is initial-
+ ized to 0.
+
+ The <I>typeflag</I> field specifies the type of the file that is
+ archived. If a specific <B>tar</B> implementation does not include
+ support for a specific typeflag value, this implementation
+ will extract the unknown file types as if they were plain
+ files.
+
+ <B>'0'</B> <B>REGTYPE</B>
+ A regular file. If the <I>size</I> field is non zero, then
+ file data follows the header.
+
+ <B>'\0'</B> <B>AREGTYPE</B>
+ For backwards compatibility with pre POSIX.1-1988 <B>tar</B>
+ implementations, a nul character is also recognized as
+ marker for plain files. It is not generated by recent
+ <B>tar</B> implementations. If the <I>size</I> field is non zero,
+ then file data follows the header.
+
+ <B>'1'</B> <B>LNKTYPE</B>
+ The file is a hard link to another file. The name of
+ the file that the file is linked to is in the <I>linkname</I>
+ part of the header. For <B>tar</B> archives written by pre
+ POSIX.1-1988 implementations, the <I>size</I> field usually
+ contains the size of the file and needs to be ignored
+
+Joerg Schilling Last change: 05/10/19 6
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ as no data may follow this header type. For POSIX.1-
+ 1988 compliant archives, the <I>size</I> field needs to be 0.
+ For POSIX.1-2001 compliant archives, the <I>size</I> field may
+ be non zero, indicating that file data is included in
+ the archive.
+
+ <B>'2'</B> <B>SYMTYPE</B>
+ The file is a symbolic link to another file. The name
+ of the file that the file is linked to is in the <I>link-</I>
+ <I>name</I> part of the header. The <I>size</I> field needs to be 0.
+ No file data may follow the header.
+
+ <B>'3'</B> <B>CHRTYPE</B>
+ A character special file. The fields <I>devmajor</I> and <I>dev-</I>
+ <I>minor</I> contain information that defines the file. The
+ meaning of the <I>size</I> field is unspecified by the POSIX
+ standard. No file data may follow the header.
+
+ <B>'4'</B> <B>BLKTYPE</B>
+ A block special file. The fields <I>devmajor</I> and <I>devminor</I>
+ contain information that defines the file. The meaning
+ of the <I>size</I> field is unspecified by the POSIX standard.
+ No file data may follow the header.
+
+ <B>'5'</B> <B>DIRTYPE</B>
+ A directory or sub directory. Old (pre POSIX.1-1988)
+ <B>tar</B> implementations did use the same <I>typeflag</I> value as
+ for plain files and added a slash to the name. If the
+ <I>size</I> field is non zero then it indicates the maximum
+ size in characters the system may allocate for this
+ directory. If the <I>size</I> field is 0, then the system
+ shall not limit the size of the directory. On operating
+ systems where the disk allocation is not done on a
+ directory base, the <I>size</I> field is ignored on extrac-
+ tion. No file data may follow the header.
+
+ <B>'6'</B> <B>FIFOTYPE</B>
+ A named pipe. The meaning of the size field is
+ unspecified by the POSIX standard. The <I>size</I> field must
+ be ignored on extraction. No file data may follow the
+ header.
+
+ <B>'7'</B> <B>CONTTYPE</B>
+ A contiguous file. This is a file that gives special
+ performance attributes. Operating systems that don't
+ support this file type extract this file type as plain
+ files. If the <I>size</I> field is non zero, then file data
+ follows the header.
+
+ <B>'g'</B> <B>GLOBAL</B> <B>POSIX.1-2001</B> <B>HEADER</B>
+ With POSIX.1-2001 pax archives, this type defines a
+ global extended header. The <I>size</I> is always non zero
+
+Joerg Schilling Last change: 05/10/19 7
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ and denotes the sum of the length fields in the
+ extended header data. The data that follows the header
+ is in the <B>pax</B> <B>extended</B> <B>header</B> format. The extended
+ header records in this header type affect all following
+ files in the archive unless they are overwritten by new
+ values. See <B>EXTENDED</B> <B>TAR</B> <B>(PAX)</B> <B>HEADER</B> <B>FORMAT</B> section
+ below.
+
+ <B>'x'</B> <B>EXTENDED</B> <B>POSIX.1-2001</B> <B>HEADER</B>
+ With POSIX.1-2001 pax archives, this type defines an
+ extended header. The <I>size</I> is always non zero and
+ denotes the sum of the length fields in the extended
+ header data. The data that follows the header is in
+ the <B>pax</B> <B>extended</B> <B>header</B> format. The extended header
+ records in this header type only affect the following
+ file in the archive. See <B>EXTENDED</B> <B>TAR</B> <B>(PAX)</B> <B>HEADER</B>
+ <B>FORMAT</B> section below.
+
+ <B>'A'</B> <B>-</B> <B>'Z'</B>
+ Reserved for vendor specific implementations.
+
+ <B>'A'</B> A Solaris ACL entry as used by the <B>tar</B> implementation
+ from Sun. The <I>size</I> is always non zero and denotes the
+ length of the data that follows the header. <B>Star</B>
+ currently is not able to handle this header type.
+
+ <B>'D'</B> A GNU dump directory. This header type is not created
+ by <B>star</B> and handled like a directory during an extract
+ operation, so the content is ignored by <B>star</B>. The <I>size</I>
+ field denotes the length of the data that follows the
+ header.
+
+ <B>'E'</B> A Solaris Extended Attribute File. The <I>size</I> field
+ denotes the length of the data that follows the header.
+ <B>Star</B> currently is not able to handle this header type.
+
+ <B>'I'</B> A <B>inode</B> <B>metadata</B> entry. This header type is used by
+ <B>star</B> to archive inode meta data only. To archive more
+ inode meta data than possible with a POSIX-1.1988 <B>tar</B>
+ header, a header with type <B>'I'</B> is usually preceded by a
+ <B>'x'</B> header. It is used with incremental backups. The
+ <I>size</I> field holds the length of the file. No file data
+ follows this header.
+
+ <B>'K'</B> A long link name. <B>Star</B> is able to read and write this
+ type of header. With the <B>xustar</B> and <B>exustar</B> formats,
+ <B>star</B> prefers to store long link names using the
+ POSIX.1-2001 method. The <I>size</I> is always non zero and
+ denotes the length of the long link name including the
+ trailing null byte. The link name is in the data that
+ follows the header.
+
+Joerg Schilling Last change: 05/10/19 8
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ <B>'L'</B> A long file name. <B>Star</B> is able to read and write this
+ type of header. With the <B>xustar</B> and <B>exustar</B> formats,
+ <B>star</B> prefers to store long file names using the
+ POSIX.1-2001 method. The <I>size</I> is always non zero and
+ denotes the length of the long file name including the
+ trailing null byte. The file name is in the data that
+ follows the header.
+
+ <B>'M'</B> A multi volume continuation entry. It is used by <B>star</B>
+ to tell the extraction program via the <I>size</I> field when
+ the next regular archive header will follow. This
+ allows to start extracting multi volume archives with a
+ volume number greater than one. It is used by GNU tar
+ to verify multi volume continuation volumes. Other
+ fields in the GNU multi volume continuation header are
+ a result of a GNU tar misconception and cannot be
+ used. If the <I>size</I> field is non zero the data following
+ the header is skipped by <B>star</B> if the volume that starts
+ with it is mounted as the first volume. This header is
+ ignored if the volume that starts with it is mounted as
+ continuation volume.
+
+ <B>'N'</B> An outdated linktype used by old GNU tar versions to
+ store long file names. This type is unsupported by
+ <B>star</B>.
+
+ <B>'S'</B> A sparse file. This header type is used by <B>star</B> and
+ <B>GNU</B> <B>tar</B>. A sparse header is used instead of a plain
+ file header to denote a sparse file that follows.
+ Directly after the header, a list of sparse hole
+ descriptors follows followed by the compacted file
+ data. With <I>star</I> formats, the <I>size</I> field holds a size
+ that represents the sum of the sparse hole descriptors
+ plus the size of the compacted file data. This allows
+ other <B>tar</B> implementations to correctly skip to the next
+ <B>tar</B> <B>header</B>. With GNU tar, up to 4 sparse hole descrip-
+ tors fit into the sparse header. Additional hole
+ descriptors are not needed if the file has less than 4
+ holes. With GNU tar, the size field breaks general <I>tar</I>
+ header rules and is meaningless because the size of the
+ sparse hole descriptors does not count.
+
+ <B>'V'</B> A volume header. The <I>name</I> field is used to hold the
+ volume name. <B>Star</B> uses the <I>atime</I> field to hold the
+ volume number in case there is no POSIX.1-2001 extended
+ header. This header type is used by <B>star</B> and <B>GNU</B> <B>tar</B>.
+ If the <I>size</I> field is non zero the data following the
+ header is skipped by <B>star</B>.
+
+ <B>'X'</B> A vendor unique variant of the POSIX.1-2001 extended
+ header type. It has been implemented by Sun many years
+ before the POSIX.1-2001 standard has been approved.
+
+Joerg Schilling Last change: 05/10/19 9
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ See also the <I>typeflag</I> 'x' header type. <B>Star</B> is able to
+ read and write this type of header.
+
+
+</PRE>
+<H2>EXTENDED TAR (PAX) HEADER STRUCTURE</H2><PRE>
+ <B>Block</B> <B>type</B> <B>Description</B>
+
+ Ustar Header [typeflag='g'] <I>Global</I> <I>Extended</I> <I>Header</I>
+ Global Extended Data
+ Ustar Header [typeflag='x'] <I>Extended</I> <I>Header</I>
+ Extended Data
+ Ustar header [typeflag='0'] <I>File</I> <I>with</I> <I>Extended</I> <I>Header</I>
+ Data for File #1
+ Ustar header [typeflag='0'] <I>File</I> <I>without</I> <I>Extended</I> <I>Header</I>
+ Data for File #2
+ Block of binary zeroes <I>First</I> <I>EOF</I> <I>Block</I>
+ Block of binary zeroes <I>Second</I> <I>EOF</I> <I>Block</I>
+
+
+</PRE>
+<H2>EXTENDED TAR (PAX) HEADER FORMAT</H2><PRE>
+ The data block that follows a <B>tar</B> archive header with
+ <I>typeflag</I> <B>'g'</B> or <B>'x'</B> contains one or more records in the fol-
+ lowing format:
+
+ "%d %s=%s\n", &lt;<I>length</I>&gt;, &lt;<I>keyword</I>&gt;, &lt;<I>value</I>&gt;
+
+ Each record starts with a decimal length field. The length
+ includes the total size of a record including the length
+ field itself and the trailing new line.
+
+ The <I>keyword</I> may not include an equal sign. All keywords
+ beginning with lower case letters and digits are reserved
+ for future use by the POSIX standard.
+
+ If the value field is of zero length, it deletes any header
+ field of the same name that is in effect from the same
+ extended header or from a previous global header.
+
+ Null characters do not delimit any value. The value is only
+ limited by its implicit length.
+
+
+</PRE>
+<H2>EXTENDED TAR (PAX) HEADER KEYWORDS</H2><PRE>
+ POSIX.1-2001 extended <B>pax</B> header keywords. All numerical
+ values are represented as decimal strings. All texts are
+ represented as 7-bit ascii or UTF-8:
+
+ <B>atime</B>
+ The time from <B>st_atime</B> in sub second granularity. <B>Star</B>
+ currently supports a nanosecond granularity.
+
+ <B>charset</B>
+ The name of the character set used to encode the data
+ in the following file(s). This keyword is currently
+
+Joerg Schilling Last change: 05/10/19 10
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ ignored by <B>star</B>.
+
+ <B>comment</B>
+ Any number of characters that  should be treated as
+ comment. <B>Star</B> ignores the comment as documented by the
+ POSIX standard.
+
+ <B>ctime</B>
+ The time from <B>st_ctime</B> in sub second granularity. <B>Star</B>
+ currently supports a nanosecond granularity.
+
+ <B>gid</B> The group ID of the group that owns the file. The
+ argument is a decimal number. This field is used if
+ the group ID of a file is greater than 2097151 (octal
+ 7777777).
+
+ <B>gname</B>
+ The group name of the following file(s) coded in UTF-8
+ if the group name does not fit into 32 characters or
+ cannot be expressed in 7-Bit ASCII.
+
+ <B>linkpath</B>
+ The name of the <I>linkpath</I> coded in UTF-8 if it is longer
+ than 100 characters or cannot be expressed in 7-Bit
+ ASCII.
+
+ <B>mtime</B>
+ The time from <B>st_mtime</B> in sub second granularity. <B>Star</B>
+ currently supports a nanosecond granularity.
+
+ <B>path</B> The name of the <I>path</I> coded in UTF-8 if it does not
+ fit into 100 characters + 155 characters prefix or can-
+ not be expressed in 7-Bit ASCII.
+
+ <B>realtime.</B><I>any</I>
+ The keywords prefixed by <B>realtime.</B> are reserved for
+ future standardization.
+
+ <B>security.</B><I>any</I>
+ The keywords prefixed by <B>security.</B> are reserved for
+ future standardization.
+
+ <B>size</B> The size of the file as decimal number if the file size
+ is greater than 8589934591 (octal 77777777777). The
+ <B>size</B> keyword may not refer to the real file size but is
+ related to the size of the file in the archive. See
+ also <B>SCHILY.realsize</B> for more information.
+
+ <B>uid</B> The user ID of the user that owns the file. The argu-
+ ment is a decimal number. This field is used if the
+ user ID of a file is greater than 2097151 (octal
+ 7777777).
+
+Joerg Schilling Last change: 05/10/19 11
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ <B>uname</B>
+ The user name of the following file(s) coded in UTF-8
+ if the user name does not fit into 32 characters or
+ cannot be expressed in 7-Bit ASCII.
+
+ <I>VENDOR</I>.keyword
+ Any keyword that starts with a vendor name in capital
+ letters is reserved for vendor specific extensions by
+ the standard. <B>Star</B> uses a lot of these vendor specific
+ extensions. See below for more information.
+
+
+</PRE>
+<H2>SCHILY PAX EXTENSION KEYWORDS</H2><PRE>
+ <B>Star</B> uses own vendor specific extensions. The <B>SCHILY</B> vendor
+ specific extended <B>pax</B> header keywords are:
+
+ <B>SCHILY.acl.access</B>
+ The ACL for a file.
+
+ Since no official backup format for POSIX access con-
+ trol lists has been defined, <B>star</B> uses the vendor
+ defined attributes <B>SCHILY.acl.access</B> and
+ <B>SCHILY.acl.default</B> for storing the <B>ACL</B> and <B>Default</B> <B>ACL</B>
+ of a file, respectively. The access control lists are
+ stored in the short text form as defined in <B>POSIX</B>
+ <B>1003.1e</B> <B>draft</B> <B>standard</B> <B>17</B>.
+
+ To each named user <B>ACL</B> entry a fourth colon separated
+ field containing the <I>user</I> <I>identifier</I> (<I>UID</I>) of the
+ associated user is appended. To each named group entry
+ a fourth colon separated field containing the <I>group</I>
+ <I>identifier</I> (<I>GID</I>) of the associated group is appended.
+ (POSIX 1003.1e draft standard 17 allows to add fields
+ to ACL entries.)
+
+ This is an example of the format used for
+ <B>SCHILY.acl.access</B> (a space has been inserted after the
+ equal sign and lines are broken [marked with '\' ] for
+ readability, additional fields in bold):
+
+ SCHILY.acl.access= user::rwx,user:lisa:r-x:<B>502</B>, \
+ group::r-x,group:toolies:rwx:<B>102</B>, \
+ mask::rwx,other::r--
+
+ The numerical user and group identifiers are essential
+ when restoring a system completely from a backup, as
+ initially the name-to-identifier mappings may not be
+ available, and then file ownership restoration would
+ not work.
+
+ As the archive format that is used for backing up
+ access control lists is compatible with the <B>pax</B> archive
+ format, archives created that way can be restored by
+
+Joerg Schilling Last change: 05/10/19 12
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ <B>star</B> or a POSIX.1-2001 compliant <B>pax</B>. Note that pro-
+ grams other than <B>star</B> will ignore the ACL information.
+
+ <B>SCHILY.acl.default</B>
+ The default ACL for a file. See <B>SCHILY.acl.access</B> for
+ more information.
+
+ This is an example of the format used for
+ <B>SCHILY.acl.default</B> (a space has been inserted after the
+ equal sign and lines are broken [marked with '\' ] for
+ readability, additional fields in bold):
+
+ SCHILY.acl.default= user::rwx,user:lisa:r-x:<B>502</B>, \
+ group::r-x,mask::r-x,other::r-x
+
+ <B>SCHILY.ddev</B>
+ The device ids for names used in the <B>SCHILY.dir</B> dump
+ directory list from <B>st_dev</B> of the file as decimal
+ number. The <B>SCHILY.ddev</B> keyword is followed by a space
+ separated list of device id numbers. Each corresponds
+ exactly to a name in the list found in <B>SCHILY.dir</B>. If
+ a specific device id number is repeated, a comma (,)
+ without a following space may be used to denote that the
+ current device id number is identical to the previous
+ number. This keyword is used in <B>dump</B> mode. This key-
+ word is not yet implemented.
+
+ The value is a signed int. An implementation should be
+ able to handle at least 64 bit values. Note that the
+ value is signed because POSIX does not specify more
+ than the type should be an int.
+
+ <B>SCHILY.dev</B>
+ The device id from <B>st_dev</B> of the file as decimal
+ number. This keyword is used in <B>dump</B> mode.
+
+ The value is a signed int. An implementation should be
+ able to handle at least 64 bit values. Note that the
+ value is signed because POSIX does not specify more
+ than the type should be an int.
+
+ <B>SCHILY.devmajor</B>
+ The device major number of the file if it is a charac-
+ ter or block special file. The argument is a decimal
+ number. This field is used if the device major of the
+ file is greater than 2097151 (octal 7777777).
+
+ The value is a signed int. An implementation should be
+ able to handle at least 64 bit values. Note that the
+ value is signed because POSIX does not specify more
+ than the type should be an int.
+
+Joerg Schilling Last change: 05/10/19 13
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ <B>SCHILY.devminor</B>
+ The device minor number of the file if it is a charac-
+ ter or block special file. The argument is a decimal
+ number. This field is used if the device minor of the
+ file is greater than 2097151 (octal 7777777).
+
+ The value is a signed int. An implementation should be
+ able to handle at least 64 bit values. Note that the
+ value is signed because POSIX does not specify more
+ than the type should be an int.
+
+ <B>SCHILY.dino</B>
+ The inode numbers for names used in the <B>SCHILY.dir</B> dump
+ directory list from <B>st_ino</B> of the file as decimal
+ number. The <B>SCHILY.dino</B> keyword is followed by a space
+ separated list of inode numbers. Each corresponds
+ exactly to a name in the list found in <B>SCHILY.dir</B>.
+ This keyword is used in <B>dump</B> mode.
+
+ The values are unsigned int. An implementation should
+ be able to handle at least 64 bit unsigned values.
+
+ <B>SCHILY.dir</B>
+ A list of filenames (the content) for the current
+ directory. The names are coded in UTF-8. Each file
+ name is prefixed by a single character that is used as
+ a flag. Each file name is limited by a null character.
+ The null character is directly followed by the flag
+ character for the next file name in case the list is
+ not terminated by the current file name. The flag
+ character must not be a null character. By default, a
+ ^A (octal 001) is used. The following flags are
+ defined:
+
+ <B>\000</B> This is the list terminator character - the second
+ null byte, see below.
+
+ <B>^A</B> The default flag that is used in case the <B>dump</B> <B>dir</B>
+ features have not been active.
+
+ <B>Y</B> A non directory file that is in the current
+ (incremental) dump.
+
+ <B>N</B> A non directory file that is not in the current
+ (incremental) dump.
+
+ <B>D</B> A directory that is in the current (incremental)
+ dump.
+
+ <B>d</B> A directory that is not in the current (incremen-
+ tal) dump.
+
+Joerg Schilling Last change: 05/10/19 14
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ The list is terminated by two successive null bytes.
+ The first is the null byte for the last file name. The
+ second null byte is at the position where a flag char-
+ acter would be expected, it acts as a list terminator.
+ The length tag for the <B>SCHILY.dir</B> data includes both
+ null bytes.
+
+ If a dump mode has been selected that writes compact
+ complete directory information to the beginning of the
+ archive, the flag character may contain values dif-
+ ferent from ^A. <B>Star</B> implementations up to <B>star-1.5</B> do
+ not include this feature. Tar implementations that
+ like to read archives that use the <B>SCHILY.dir</B> keyword,
+ shall not rely on values other than \000 (^@) or \001
+ (^A).
+
+ This keyword is used in <B>dump</B> mode.
+
+ <B>SCHILY.fflags</B>
+ A textual version of the BSD or Linux extended file
+ flags. As this tag has not yet been documented, please
+ look into the <B>star</B> source, file <B>fflags.c</B> for more
+ information.
+
+ <B>SCHILY.filetype</B>
+ A textual version of the real file type of the file.
+ The following names are used:
+
+ <B>unallocated</B> An unknown file type that may
+ be a result of a <B><A HREF="unlink.2.html">unlink(2)</A></B>
+ operation. This should never
+ happen.
+
+ <B>regular</B> A regular file.
+
+ <B>contiguous</B> A contiguous file. On operating
+ systems or file systems that
+ don't support this file type,
+ it is handled like a regular
+ file.
+
+ <B>symlink</B> A symbolic link to any file
+ type.
+
+ <B>directory</B> A directory.
+
+ <B>character</B> <B>special</B> A character special file.
+
+ <B>block</B> <B>special</B> A block special file.
+
+ <B>fifo</B> A named pipe.
+
+Joerg Schilling Last change: 05/10/19 15
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ <B>socket</B> A UNIX domain socket.
+
+ <B>mpx</B> <B>character</B> <B>special</B> A multiplexed character special
+ file.
+
+ <B>mpx</B> <B>block</B> <B>special</B> A multiplexed block special
+ file.
+
+ <B>XENIX</B> <B>nsem</B> A XENIX named semaphore.
+
+ <B>XENIX</B> <B>nshd</B> XENIX shared data.
+
+ <B>door</B> A Solaris door.
+
+ <B>eventcount</B> A UNOS event count.
+
+ <B>whiteout</B> A BSD whiteout directory entry.
+
+ <B>sparse</B> A sparse regular file.
+
+ <B>volheader</B> A volume header.
+
+ <B>unknown/bad</B> Any other unknown file type.
+ This should never happen.
+
+ <B>SCHILY.ino</B>
+ The inode number from <B>st_ino</B> of the file as decimal
+ number. This keyword is used in <B>dump</B> mode.
+
+ The value is an unsigned int. An implementation should
+ be able to handle at least 64 bit unsigned values.
+
+ <B>SCHILY.nlink</B>
+ The link count of the file as decimal number. This
+ keyword is used in <B>dump</B> mode.
+
+ The value is an unsigned int. An implementation should
+ be able to handle at least 32 bit unsigned values.
+
+ <B>SCHILY.offset</B>
+ The <B>offset</B> value for a multi volume continuation
+ header. This keyword is used with multi volume con-
+ tinuation headers. Multi volume continuation headers
+ are used to allow to start reading a multi volume
+ archive past the first volume.
+
+ The value is an unsigned int. An implementation should
+ be able to handle at least 64 bit unsigned values.
+
+ <B>SCHILY.realsize</B>
+ The real size of the file as decimal number. This
+
+Joerg Schilling Last change: 05/10/19 16
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ keyword is used if the real size of the file differs
+ from the visible size of the file in the archive. The
+ real file size differs from the size in the archive if
+ the file type is <B>sparse</B> or if the file is a continua-
+ tion file on a multi volume archive. In case the
+ <B>SCHILY.realsize</B> keyword is needed, it must be past any
+ <B>size</B> keyword in case a <B>size</B> keyword is also present.
+
+ The value is an unsigned int. An implementation should
+ be able to handle at least 64 bit unsigned values.
+
+ <B>SCHILY.tarfiletype</B>
+ The following additional file types are used in
+ <B>SCHILY.tarfiletype</B>:
+
+ <B>hardlink</B>
+ A hard link to any file type.
+
+ <B>dumpdir</B>
+ A directory with dump entries
+
+ <B>multivol</B> <B>continuation</B>
+ A multi volume continuation for any file type.
+
+ <B>meta</B> A meta entry (inode meta data only) for any file
+ type.
+
+ <B>SCHILY.xattr.</B><I>attr</I>
+ A POSIX.1-2001 coded version of the Linux extended file
+ attributes. Linux extended file attributes are
+ name/value pairs. Every attribute <I>name</I> results in a
+ <B>SCHILY.xattr.</B><I>name</I> tag and the value of the extended
+ attribute is used as the value of the POSIX.1-2001
+ header tag. Note that this way of coding is not port-
+ able across platforms. A version for BSD may be
+ created but Solaris includes far more features with
+ extended attribute files than Linux does.
+
+ A future version of <B>star</B> will implement a similar
+ method as the <B>tar</B> program on Solaris currently uses.
+ When this implementation is ready, the
+ <B>SCHILY.xattr.</B><I>name</I> feature may be removed in favor of a
+ truly portable implementation that supports Solaris
+ also.
+
+
+</PRE>
+<H2>SCHILY 'G'LOBAL PAX EXTENSION KEYWORDS</H2><PRE>
+ The following <B>star</B> vendor unique extensions may only appear
+ in <B>'g'lobal</B> extended <B>pax</B> headers:
+
+ <B>SCHILY.archtype</B>
+ The textual version of the archive type used. The
+
+Joerg Schilling Last change: 05/10/19 17
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ textual values used for <B>SCHILY.archtype</B> are the same
+ names that are used in the <B>star</B> command line options to
+ set up a specific archive type.
+
+ In order to allow archive type recognition from this
+ keyword, the minimum tape block size must be 2x512
+ bytes (1024 bytes) and the <B>SCHILY.archtype</B> keyword
+ needs to be in the first 512 bytes of the content of
+ the first <B>'g'lobal</B> <B>pax</B> header. Then the first tape
+ block may be scanned to recognize the archive type.
+
+ <B>SCHILY.release</B>
+ The textual version of the <B>star</B> version string and the
+ platform name where this <B>star</B> has been compiled. The
+ same text appears when calling <I>star</I> -<I>version</I>.
+
+ <B>SCHILY.volhdr.blockoff</B>
+ This keyword is used for multi volume archives. It
+ represents the offset within the whole archive
+ expressed in 512 byte units.
+
+ The value is an unsigned int with a valid range between
+ 1 and infinity. An implementation should be able to
+ handle at least 64 bit unsigned values.
+
+ <B>SCHILY.volhdr.blocksize</B>
+ The tape blocksize expressed in 512 byte units that was
+ used when writing the archive.
+
+ The value is an unsigned int with a valid range between
+ 1 and infinity. An implementation should be able to
+ handle at least 31 bit unsigned values.
+
+ <B>SCHILY.volhdr.cwd</B>
+ This keyword is used in dump mode. It is only used to
+ contain the real backup working directory if the
+ <B>fs</B>-<B>name=</B> option of star is used to overwrite the
+ <B>SCHILY.volhdr.filesys</B> value. Overwriting
+ <B>SCHILY.volhdr.filesys</B> is needed when backups are run on
+ file system snapshots rather than on the real file sys-
+ tem.
+
+ <B>SCHILY.volhdr.device</B>
+ This keyword is used in dump mode. It represents the
+ name of the device that holds the file system data. For
+ disk based file systems, this is the device name of the
+ mounted device.
+
+ This keyword is optional. It helps to correctly iden-
+ tify the file system from which this dump has been
+ made.
+
+Joerg Schilling Last change: 05/10/19 18
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ <B>SCHILY.volhdr.dumpdate</B>
+ This keyword is used in dump mode. It represents the
+ time the current dump did start.
+
+ <B>SCHILY.volhdr.dumplevel</B>
+ This keyword is used in dump mode. It represents the
+ level of the current dump. Dump levels are small
+ numbers, the lowest possible number is 0. Dump level 0
+ represents a full backup. Dump level 1 represents a
+ backup that contains all changes that did occur since
+ the last level 0 dump. Dump level 2 represents a
+ backup that contains all changes that did occur since
+ the last level 1 dump. <B>Star</B> does not specify a maximum
+ allowed dump level but you should try to keep the
+ numbers less than 100.
+
+ The value is an unsigned int with a valid range between
+ 0 and at least 100.
+
+ <B>SCHILY.volhdr.dumptype</B>
+ This keyword is used in dump mode. If the dump is a
+ complete dump of a file system, then the argument is
+ the text <B>full</B>, else the argument is the text <B>partial</B>.
+
+ <B>SCHILY.volhdr.filesys</B>
+ This keyword is used in dump mode. It represents the
+ top level directory for the file system from which this
+ dump has been made. If the dump represents a dump that
+ has an associated level, then this directory needs
+ to be identical to the root directory of this file sys-
+ tem which is the mount point.
+
+ <B>SCHILY.volhdr.hostname</B>
+ This keyword is used in dump mode. The value is
+ retrieved from <B>gethostname(3)</B> or <B>uname(2)</B>.
+
+ <B>SCHILY.volhdr.label</B>
+ The textual volume label. The volume label must be
+ identical within a set of multi volume archives.
+
+ <B>SCHILY.volhdr.refdate</B>
+ This keyword is used in dump mode if the current dump
+ is an incremental dump with a level &gt; 0. It represents
+ the time the related dump did start.
+
+ <B>SCHILY.volhdr.reflevel</B>
+ This keyword is used in dump mode if the current dump
+ is an incremental dump with a level &gt; 0. It represents
+ the level of the related dump. The related dump is the
+ last dump with a level that is lower than the level of
+ this dump. If a dump with the level of the current
+ dump -1 exists, then this is the related dump level.
+
+Joerg Schilling Last change: 05/10/19 19
+
+
+Schily's USER COMMANDS <B><A HREF="STAR.4.html">STAR(4L)</A></B>
+
+ Otherwise, the dump level is decremented until a valid
+ dump level could be found in the dump database.
+
+ The value is an unsigned int with a valid range between
+ 0 and at least 100.
+
+ <B>SCHILY.volhdr.tapesize</B>
+ This keyword is used for multi volume archives and may
+ be used to verify the volume size on read back. It
+ represents the tape size expressed in 512 byte units.
+ If this keyword is set in multi volume mode, the size
+ of the tape is not autodetected but set from a command
+ line option.
+
+ The value is an unsigned int with a valid range between
+ 1 and infinity. An implementation should be able to
+ handle at least 64 bit unsigned values.
+
+ <B>SCHILY.volhdr.volume</B>
+ This keyword is used for multi volume archives. It
+ represents the volume number within a volume set. The
+ number used for the first volume is 1.
+
+ The value is an unsigned int with a valid range between
+ 1 and infinity. An implementation should be able to
+ handle at least 31 bit unsigned values.
+
+
+</PRE>
+<H2>MULTI VOLUME ARCHIVE HANDLING</H2><PRE>
+ To be documented in the future.
+
+
+</PRE>
+<H2>SEE ALSO</H2><PRE>
+
+</PRE>
+<H2>NOTES</H2><PRE>
+
+</PRE>
+<H2>BUGS</H2><PRE>
+
+</PRE>
+<H2>AUTHOR</H2><PRE>
+
+Joerg Schilling Last change: 05/10/19 20
+
+</PRE>
+<HR>
+<ADDRESS>
+Man(1) output converted with
+<a href="http://www.oac.uci.edu/indiv/ehood/man2html.html">man2html</a>
+</ADDRESS>
+<p><hr>
+<a href="http://www.fhg.de/"><IMG SRC="fhglogr.gif" ALT="FhG " BORDER=0></a>
+<a href="http://www.fokus.fraunhofer.de/home/"><IMG SRC="fhlogo.gif" ALT="FhG FOKUS " BORDER=0></a>
+<a href="http://www.berlios.de/"><IMG SRC="berliOS_logo.jpg" ALT="BerliOS " BORDER=0 WIDTH=159 HEIGHT=41></a>
+<a href="http://cdrecord.berlios.de/old/private/index.html">
+<IMG SRC="schilling.gif" ALT="Schily " BORDER=0 WIDTH=41 HEIGHT=54></a>
+<a href="http://cdrecord.berlios.de/old/private/index.html">Schily's Home</a>
+<a href="ftp://ftp.berlios.de/pub/ved/"><IMG SRC="vedpowered.gif" ALT="VED powered " BORDER=0 ></a>
+
+</body>
+</html>
diff --git a/deps/npm/node_modules/tar/test/fixtures/packtest/Ω.txt b/deps/npm/node_modules/tar/test/fixtures/packtest/Ω.txt
new file mode 100644
index 0000000000..1ca042fff2
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/packtest/Ω.txt
@@ -0,0 +1 @@
+Ω \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc b/deps/npm/node_modules/tar/test/fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
new file mode 100644
index 0000000000..5a5d18e294
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc
@@ -0,0 +1 @@
+cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/symlink b/deps/npm/node_modules/tar/test/fixtures/symlink
new file mode 120000
index 0000000000..218c28e640
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/symlink
@@ -0,0 +1 @@
+hardlink-1 \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/fixtures/Ω.txt b/deps/npm/node_modules/tar/test/fixtures/Ω.txt
new file mode 100644
index 0000000000..1ca042fff2
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/fixtures/Ω.txt
@@ -0,0 +1 @@
+Ω \ No newline at end of file
diff --git a/deps/npm/node_modules/tar/test/header.js b/deps/npm/node_modules/tar/test/header.js
new file mode 100644
index 0000000000..8ea6f79500
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/header.js
@@ -0,0 +1,183 @@
+var tap = require("tap")
+var TarHeader = require("../lib/header.js")
+var tar = require("../tar.js")
+var fs = require("fs")
+
+
+var headers =
+ { "a.txt file header":
+ [ "612e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303430312031313635313336303333332030313234353100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'a.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 257
+ , mtime: 1319493851
+ , cksum: 5417
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+
+ , "omega pax": // the extended header from omega tar.
+ [ "5061784865616465722fcea92e74787400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303137302031313534333731303631312030313530353100207800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'PaxHeader/Ω.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 120
+ , mtime: 1301254537
+ , cksum: 6697
+ , type: 'x'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' } ]
+
+ , "omega file header":
+ [ "cea92e7478740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030322031313534333731303631312030313330373200203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'Ω.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 2
+ , mtime: 1301254537
+ , cksum: 5690
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' } ]
+
+ , "foo.js file header":
+ [ "666f6f2e6a730000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030303030342031313534333637303734312030313236313700203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'foo.js'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 4
+ , mtime: 1301246433
+ , cksum: 5519
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+
+ , "b.txt file header":
+ [ "622e747874000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000030303036343420003035373736312000303030303234200030303030303030313030302031313635313336303637372030313234363100203000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000757374617200303069736161637300000000000000000000000000000000000000000000000000007374616666000000000000000000000000000000000000000000000000000000303030303030200030303030303020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true
+ , path: 'b.txt'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 512
+ , mtime: 1319494079
+ , cksum: 5425
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+
+ , "deep nested file":
+ [ "636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363636363633030303634342000303537373631200030303030323420003030303030303030313434203131363532313531353333203034333331340020300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000075737461720030306973616163730000000000000000000000000000000000000000000000000000737461666600000000000000000000000000000000000000000000000000000030303030303020003030303030302000722f652f612f6c2f6c2f792f2d2f642f652f652f702f2d2f662f6f2f6c2f642f652f722f2d2f702f612f742f680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
+ , { cksumValid: true,
+ path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc'
+ , mode: 420
+ , uid: 24561
+ , gid: 20
+ , size: 100
+ , mtime: 1319687003
+ , cksum: 18124
+ , type: '0'
+ , linkpath: ''
+ , ustar: 'ustar\0'
+ , ustarver: '00'
+ , uname: 'isaacs'
+ , gname: 'staff'
+ , devmaj: 0
+ , devmin: 0
+ , fill: '' }
+ ]
+ }
+
+tap.test("parsing", function (t) {
+ Object.keys(headers).forEach(function (name) {
+ var h = headers[name]
+ , header = new Buffer(h[0], "hex")
+ , expect = h[1]
+ , parsed = new TarHeader(header)
+
+ // console.error(parsed)
+ t.has(parsed, expect, "parse " + name)
+ })
+ t.end()
+})
+
+tap.test("encoding", function (t) {
+ Object.keys(headers).forEach(function (name) {
+ var h = headers[name]
+ , expect = new Buffer(h[0], "hex")
+ , encoded = TarHeader.encode(h[1])
+
+ // might have slightly different bytes, since the standard
+ // isn't very strict, but should have the same semantics
+ // checkSum will be different, but cksumValid will be true
+
+ var th = new TarHeader(encoded)
+ delete h[1].block
+ delete h[1].needExtended
+ delete h[1].cksum
+ t.has(th, h[1], "fields "+name)
+ })
+ t.end()
+})
+
+// test these manually. they're a bit rare to find in the wild
+tap.test("parseNumeric tests", function (t) {
+ var parseNumeric = TarHeader.parseNumeric
+ , numbers =
+ { "303737373737373700": 2097151
+ , "30373737373737373737373700": 8589934591
+ , "303030303036343400": 420
+ , "800000ffffffffffff": 281474976710655
+ , "ffffff000000000001": -281474976710654
+ , "ffffff000000000000": -281474976710655
+ , "800000000000200000": 2097152
+ , "8000000000001544c5": 1393861
+ , "ffffffffffff1544c5": -15383354 }
+ Object.keys(numbers).forEach(function (n) {
+ var b = new Buffer(n, "hex")
+ t.equal(parseNumeric(b), numbers[n], n + " === " + numbers[n])
+ })
+ t.end()
+})
diff --git a/deps/npm/node_modules/tar/test/pack.js b/deps/npm/node_modules/tar/test/pack.js
new file mode 100644
index 0000000000..3fc808d04f
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/pack.js
@@ -0,0 +1,953 @@
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , pkg = require("../package.json")
+ , Pack = tar.Pack
+ , fstream = require("fstream")
+ , Reader = fstream.Reader
+ , Writer = fstream.Writer
+ , path = require("path")
+ , input = path.resolve(__dirname, "fixtures/")
+ , target = path.resolve(__dirname, "tmp/pack.tar")
+ , uid = process.getuid ? process.getuid() : 0
+ , gid = process.getgid ? process.getgid() : 0
+
+ , entries =
+
+ // the global header and root fixtures/ dir are going to get
+ // a different date each time, so omit that bit.
+ // Also, dev/ino values differ across machines, so that's not
+ // included. Rather than use
+ [ [ 'globalExtendedHeader',
+ { path: 'PaxHeader/',
+ mode: 438,
+ uid: 0,
+ gid: 0,
+ type: 'g',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { "NODETAR.author": pkg.author,
+ "NODETAR.name": pkg.name,
+ "NODETAR.description": pkg.description,
+ "NODETAR.version": pkg.version,
+ "NODETAR.repository.type": pkg.repository.type,
+ "NODETAR.repository.url": pkg.repository.url,
+ "NODETAR.main": pkg.main,
+ "NODETAR.scripts.test": pkg.scripts.test,
+ "NODETAR.engines.node": pkg.engines.node } ]
+
+ , [ 'entry',
+ { path: 'fixtures/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/200cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 402,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 13492,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ 'NODETAR.depth': '1',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 13475,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ 'NODETAR.depth': '1',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/a.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 257,
+ mtime: new Date('Mon, 24 Oct 2011 22:04:11 GMT'),
+ cksum: 5114,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/b.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 512,
+ mtime: new Date('Mon, 24 Oct 2011 22:07:59 GMT'),
+ cksum: 5122,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/c.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 513,
+ mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'),
+ cksum: 5119,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/cc.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 513,
+ mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'),
+ cksum: 5222,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/foo.js',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 4,
+ mtime: new Date('Fri, 21 Oct 2011 21:19:29 GMT'),
+ cksum: 5211,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/hardlink-1',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 200,
+ mtime: new Date('Tue, 15 Nov 2011 03:10:09 GMT'),
+ cksum: 5554,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/hardlink-2',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Tue, 15 Nov 2011 03:10:09 GMT'),
+ cksum: 7428,
+ type: '1',
+ linkpath: 'fixtures/hardlink-1',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/omega.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ mtime: new Date('Fri, 21 Oct 2011 21:19:29 GMT'),
+ cksum: 5537,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/omega.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ mtime: new Date('Mon, 14 Nov 2011 21:42:24 GMT'),
+ cksum: 6440,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/star.4.html',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 54081,
+ mtime: new Date("Sun, 06 May 2007 13:25:06 GMT"),
+ cksum: 6566,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/packtest/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 213,
+ mtime: new Date('Mon, 14 Nov 2011 21:39:39 GMT'),
+ cksum: 7306,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'fixtures/packtest/Ω.txt',
+ 'NODETAR.depth': '2',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/packtest/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ mtime: new Date('Mon, 14 Nov 2011 21:39:39 GMT'),
+ cksum: 6297,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ 'NODETAR.depth': '2',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 4789,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 4937,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5081,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5236,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5391,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5559,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5651,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5798,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 5946,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6094,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6253,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6345,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6494,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6652,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6807,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 6954,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 7102,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 7263,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 7355,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 7514,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 7658,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:42:46 GMT'),
+ cksum: 7821,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
+ cksum: 7967,
+ type: '5',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 100,
+ mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
+ cksum: 17821,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'entry',
+ { path: 'fixtures/symlink',
+ mode: 493,
+ uid: uid,
+ gid: gid,
+ size: 0,
+ mtime: new Date('Tue, 15 Nov 2011 19:57:48 GMT'),
+ cksum: 6337,
+ type: '2',
+ linkpath: 'hardlink-1',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' } ]
+
+ , [ 'extendedHeader',
+ { path: 'PaxHeader/fixtures/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 204,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 6399,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: "fixtures/Ω.txt"
+ , "NODETAR.depth": "1"
+ , "NODETAR.type": "File"
+ , nlink: 1
+ , uid: uid
+ , gid: gid
+ , size: 2
+ , "NODETAR.blksize": "4096"
+ , "NODETAR.blocks": "8" } ]
+
+ , [ 'entry',
+ { path: 'fixtures/Ω.txt',
+ mode: 420,
+ uid: uid,
+ gid: gid,
+ size: 2,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 5392,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\u0000',
+ ustarver: '00',
+ uname: '',
+ gname: '',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ 'NODETAR.depth': '1',
+ 'NODETAR.type': 'File',
+ nlink: 1,
+ 'NODETAR.blksize': '4096',
+ 'NODETAR.blocks': '8' } ]
+ ]
+
+
+// first, make sure that the hardlinks are actually hardlinks, or this
+// won't work. Git has a way of replacing them with a copy.
+var hard1 = path.resolve(__dirname, "fixtures/hardlink-1")
+ , hard2 = path.resolve(__dirname, "fixtures/hardlink-2")
+ , fs = require("fs")
+
+try { fs.unlinkSync(hard2) } catch (e) {}
+fs.linkSync(hard1, hard2)
+
+tap.test("with global header", { timeout: 10000 }, function (t) {
+ runTest(t, true)
+})
+
+tap.test("without global header", { timeout: 10000 }, function (t) {
+ runTest(t, false)
+})
+
+function runTest (t, doGH) {
+ var reader = Reader({ path: input
+ , filter: function () {
+ return !this.path.match(/\.(tar|hex)$/)
+ }
+ })
+
+ var pack = Pack(doGH ? pkg : null)
+ var writer = Writer(target)
+
+ // skip the global header if we're not doing that.
+ var entry = doGH ? 0 : 1
+
+ t.ok(reader, "reader ok")
+ t.ok(pack, "pack ok")
+ t.ok(writer, "writer ok")
+
+ pack.pipe(writer)
+
+ var parse = tar.Parse()
+ t.ok(parse, "parser should be ok")
+
+ pack.on("data", function (c) {
+ // console.error("PACK DATA")
+ t.equal(c.length, 512, "parser should emit data in 512byte blocks")
+ parse.write(c)
+ })
+
+ pack.on("end", function () {
+ // console.error("PACK END")
+ t.pass("parser ends")
+ parse.end()
+ })
+
+ pack.on("error", function (er) {
+ t.fail("pack error", er)
+ })
+
+ parse.on("error", function (er) {
+ t.fail("parse error", er)
+ })
+
+ writer.on("error", function (er) {
+ t.fail("writer error", er)
+ })
+
+ reader.on("error", function (er) {
+ t.fail("reader error", er)
+ })
+
+ parse.on("*", function (ev, e) {
+ var wanted = entries[entry++]
+ if (!wanted) {
+ t.fail("unexpected event: "+ev)
+ return
+ }
+ t.equal(ev, wanted[0], "event type should be "+wanted[0])
+ // if (ev !== wanted[0] || e.path !== wanted[1].path) {
+ // console.error(wanted)
+ // console.error([ev, e.props])
+ // throw "break"
+ // }
+ t.has(e.props, wanted[1], "properties "+wanted[1].path)
+ if (wanted[2]) {
+ e.on("end", function () {
+ t.has(e.fields, wanted[2], "should get expected fields")
+ })
+ }
+ })
+
+ reader.pipe(pack)
+
+ writer.on("close", function () {
+ t.equal(entry, entries.length, "should get all expected entries")
+ t.pass("it finished")
+ t.end()
+ })
+
+}
diff --git a/deps/npm/node_modules/tar/test/parse.js b/deps/npm/node_modules/tar/test/parse.js
new file mode 100644
index 0000000000..f765a50129
--- /dev/null
+++ b/deps/npm/node_modules/tar/test/parse.js
@@ -0,0 +1,359 @@
+var tap = require("tap")
+ , tar = require("../tar.js")
+ , fs = require("fs")
+ , path = require("path")
+ , file = path.resolve(__dirname, "fixtures/c.tar")
+ , index = 0
+
+ , expect =
+[ [ 'entry',
+ { path: 'c.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 513,
+ mtime: new Date('Wed, 26 Oct 2011 01:10:58 GMT'),
+ cksum: 5422,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'entry',
+ { path: 'cc.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 513,
+ mtime: new Date('Wed, 26 Oct 2011 01:11:02 GMT'),
+ cksum: 5525,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'entry',
+ { path: 'r/e/a/l/l/y/-/d/e/e/p/-/f/o/l/d/e/r/-/p/a/t/h/cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 100,
+ mtime: new Date('Thu, 27 Oct 2011 03:43:23 GMT'),
+ cksum: 18124,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'entry',
+ { path: 'Ω.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 2,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 5695,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/Ω.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 120,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 6702,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: 'Ω.txt',
+ ctime: 1319737909,
+ atime: 1319739061,
+ dev: 234881026,
+ ino: 51693379,
+ nlink: 1 } ],
+ [ 'entry',
+ { path: 'Ω.txt',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 2,
+ mtime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ cksum: 5695,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Thu, 27 Oct 2011 17:51:49 GMT'),
+ atime: new Date('Thu, 27 Oct 2011 18:11:01 GMT'),
+ dev: 234881026,
+ ino: 51693379,
+ nlink: 1 },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 353,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 14488,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ ctime: 1319686868,
+ atime: 1319741254,
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 1 } ],
+ [ 'entry',
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 200,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 14570,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ atime: new Date('Thu, 27 Oct 2011 18:47:34 GMT'),
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 1 },
+ undefined ],
+ [ 'longPath',
+ { path: '././@LongLink',
+ mode: 0,
+ uid: 0,
+ gid: 0,
+ size: 201,
+ mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
+ cksum: 4976,
+ type: 'L',
+ linkpath: '',
+ ustar: false },
+ '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
+ [ 'entry',
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 1000,
+ gid: 1000,
+ size: 201,
+ mtime: new Date('Thu, 27 Oct 2011 22:21:50 GMT'),
+ cksum: 14086,
+ type: '0',
+ linkpath: '',
+ ustar: false },
+ undefined ],
+ [ 'longLinkpath',
+ { path: '././@LongLink',
+ mode: 0,
+ uid: 0,
+ gid: 0,
+ size: 201,
+ mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
+ cksum: 4975,
+ type: 'K',
+ linkpath: '',
+ ustar: false },
+ '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc' ],
+ [ 'longPath',
+ { path: '././@LongLink',
+ mode: 0,
+ uid: 0,
+ gid: 0,
+ size: 201,
+ mtime: new Date('Thu, 01 Jan 1970 00:00:00 GMT'),
+ cksum: 4976,
+ type: 'L',
+ linkpath: '',
+ ustar: false },
+ '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL' ],
+ [ 'entry',
+ { path: '200LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',
+ mode: 511,
+ uid: 1000,
+ gid: 1000,
+ size: 0,
+ mtime: new Date('Fri, 28 Oct 2011 23:05:17 GMT'),
+ cksum: 21603,
+ type: '2',
+ linkpath: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ ustar: false },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/200-hard',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 143,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 6533,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { ctime: 1320617144,
+ atime: 1320617232,
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 } ],
+ [ 'entry',
+ { path: '200-hard',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 200,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 5526,
+ type: '0',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
+ atime: new Date('Sun, 06 Nov 2011 22:07:12 GMT'),
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 },
+ undefined ],
+ [ 'extendedHeader',
+ { path: 'PaxHeader/200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 353,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 14488,
+ type: 'x',
+ linkpath: '',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '' },
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ ctime: 1320617144,
+ atime: 1320617406,
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 } ],
+ [ 'entry',
+ { path: '200ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc',
+ mode: 420,
+ uid: 24561,
+ gid: 20,
+ size: 0,
+ mtime: new Date('Thu, 27 Oct 2011 03:41:08 GMT'),
+ cksum: 15173,
+ type: '1',
+ linkpath: '200-hard',
+ ustar: 'ustar\0',
+ ustarver: '00',
+ uname: 'isaacs',
+ gname: 'staff',
+ devmaj: 0,
+ devmin: 0,
+ fill: '',
+ ctime: new Date('Sun, 06 Nov 2011 22:05:44 GMT'),
+ atime: new Date('Sun, 06 Nov 2011 22:10:06 GMT'),
+ 'LIBARCHIVE.creationtime': '1319686852',
+ dev: 234881026,
+ ino: 51681874,
+ nlink: 2 },
+ undefined ] ]
+
+
+tap.test("parser test", function (t) {
+ var parser = tar.Parse()
+
+ parser.on("end", function () {
+ t.equal(index, expect.length, "saw all expected events")
+ t.end()
+ })
+
+ fs.createReadStream(file)
+ .pipe(parser)
+ .on("*", function (ev, entry) {
+ var wanted = expect[index]
+ if (!wanted) {
+ return t.fail("Unexpected event: " + ev)
+ }
+ var result = [ev, entry.props]
+ entry.on("end", function () {
+ result.push(entry.fields || entry.body)
+
+ t.equal(ev, wanted[0], index + " event type")
+ t.equivalent(entry.props, wanted[1], wanted[1].path + " entry properties")
+ if (wanted[2]) {
+ t.equivalent(result[2], wanted[2], "metadata values")
+ }
+ index ++
+ })
+ })
+})
diff --git a/deps/npm/node_modules/which/LICENSE b/deps/npm/node_modules/which/LICENSE
new file mode 100644
index 0000000000..05a4010949
--- /dev/null
+++ b/deps/npm/node_modules/which/LICENSE
@@ -0,0 +1,23 @@
+Copyright 2009, 2010, 2011 Isaac Z. Schlueter.
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
diff --git a/deps/npm/node_modules/which/README.md b/deps/npm/node_modules/which/README.md
new file mode 100644
index 0000000000..ff1eb531a7
--- /dev/null
+++ b/deps/npm/node_modules/which/README.md
@@ -0,0 +1,5 @@
+The "which" util from npm's guts.
+
+Finds the first instance of a specified executable in the PATH
+environment variable. Does not cache the results, so `hash -r` is not
+needed when the PATH changes.
diff --git a/deps/npm/node_modules/which/bin/which b/deps/npm/node_modules/which/bin/which
new file mode 100755
index 0000000000..8432ce2f6d
--- /dev/null
+++ b/deps/npm/node_modules/which/bin/which
@@ -0,0 +1,14 @@
+#!/usr/bin/env node
+var which = require("../")
+if (process.argv.length < 3) {
+ console.error("Usage: which <thing>")
+ process.exit(1)
+}
+
+which(process.argv[2], function (er, thing) {
+ if (er) {
+ console.error(er.message)
+ process.exit(er.errno || 127)
+ }
+ console.log(thing)
+})
diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json
new file mode 100644
index 0000000000..02990697f7
--- /dev/null
+++ b/deps/npm/node_modules/which/package.json
@@ -0,0 +1,17 @@
+{
+ "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+ "name": "which",
+ "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.",
+ "version": "1.0.2",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/node-which.git"
+ },
+ "main": "which.js",
+ "bin": "./bin/which",
+ "engines": {
+ "node": "*"
+ },
+ "dependencies": {},
+ "devDependencies": {}
+}
diff --git a/deps/npm/node_modules/which/which.js b/deps/npm/node_modules/which/which.js
new file mode 100644
index 0000000000..b124ead672
--- /dev/null
+++ b/deps/npm/node_modules/which/which.js
@@ -0,0 +1,67 @@
+module.exports = which
+which.sync = whichSync
+
+var path = require("path")
+ , fs
+ , COLON = process.platform === "win32" ? ";" : ":"
+
+try {
+ fs = require("graceful-fs")
+} catch (ex) {
+ fs = require("fs")
+}
+
+// console.log(process.execPath)
+// console.log(process.argv)
+
+function isExe (mod, uid, gid) {
+ //console.error("isExe?", (mod & 0111).toString(8))
+ var ret = (mod & 0001)
+ || (mod & 0010) && process.getgid && gid === process.getgid()
+ || (mod & 0100) && process.getuid && uid === process.getuid()
+ //console.error("isExe?", ret)
+ return ret
+}
+function which (cmd, cb) {
+ if (cmd.charAt(0) === "/") return cb(null, cmd)
+ var pathEnv = (process.env.PATH || "").split(COLON)
+ , pathExt = [""]
+ if (process.platform === "win32") {
+ pathEnv.push(process.cwd())
+ pathExt = (process.env.PATHEXT || ".EXE").split(COLON)
+ }
+ //console.error("pathEnv", pathEnv)
+ ;(function F (i, l) {
+ if (i === l) return cb(new Error("not found: "+cmd))
+ var p = path.resolve(pathEnv[i], cmd)
+ ;(function E (ii, ll) {
+ if (ii === ll) return F(i + 1, l)
+ var ext = pathExt[ii]
+ //console.error(p + ext)
+ fs.stat(p + ext, function (er, stat) {
+ if (!er &&
+ stat &&
+ stat.isFile() &&
+ isExe(stat.mode, stat.uid, stat.gid)) {
+ //console.error("yes, exe!", p + ext)
+ return cb(null, p + ext)
+ }
+ return E(ii + 1, ll)
+ })
+ })(0, pathExt.length)
+ })(0, pathEnv.length)
+}
+
+
+function whichSync (cmd) {
+ if (cmd.charAt(0) === "/") return cmd
+ var pathEnv = (process.env.PATH || "").split(COLON)
+ for (var i = 0, l = pathEnv.length; i < l; i ++) {
+ var p = path.join(pathEnv[i], cmd)
+ if (p === process.execPath) return p
+ var stat
+ try { stat = fs.statSync(p) } catch (ex) {}
+ if (stat && isExe(stat.mode, stat.uid, stat.gid)) return p
+ }
+ throw new Error("not found: "+cmd)
+}
diff --git a/deps/npm/package.json b/deps/npm/package.json
new file mode 100644
index 0000000000..3060e4d077
--- /dev/null
+++ b/deps/npm/package.json
@@ -0,0 +1,77 @@
+{ "name": "npm"
+, "publishConfig": { "tag": "alpha" }
+, "description": "A package manager for node"
+, "keywords": [ "package manager", "modules", "install", "package.json" ]
+, "version": "1.1.0-alpha"
+, "preferGlobal": true
+, "config": { "publishtest": false }
+, "homepage": "http://npmjs.org/"
+, "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)"
+, "repository":
+ { "type": "git"
+ , "url": "https://github.com/isaacs/npm"
+ }
+, "bugs":
+ { "email": "npm-@googlegroups.com"
+ , "url": "http://github.com/isaacs/npm/issues"
+ }
+, "directories": { "doc": "./doc"
+ , "man": "./man"
+ , "lib": "./lib"
+ , "bin": "./bin"
+ }
+, "main": "./lib/npm.js"
+, "bin": { "npm": "./bin/npm-cli.js"
+ , "npm_g": "./bin/npm-cli.js"
+ , "npm-g": "./bin/npm-cli.js" }
+, "dependencies":
+ { "semver": "1"
+ , "ini": "1"
+ , "slide": "1"
+ , "abbrev": "1"
+ , "graceful-fs": "1"
+ , "minimatch": "0"
+ , "nopt": "1"
+ , "node-uuid": "1.2"
+ , "proto-list": "1"
+ , "rimraf": "1"
+ , "request": "~2.1.1"
+ , "which": "1"
+ , "tar": "0"
+ , "fstream": "0"
+ , "block-stream": "*"
+ , "inherits": "1"
+ , "mkdirp": "0.1"
+ }
+, "bundleDependencies":
+ [ "slide"
+ , "ini"
+ , "semver"
+ , "abbrev"
+ , "graceful-fs"
+ , "minimatch"
+ , "nopt"
+ , "node-uuid"
+ , "rimraf"
+ , "request"
+ , "proto-list"
+ , "which"
+ , "tar"
+ , "fstream"
+ , "block-stream"
+ , "inherits"
+ , "mkdirp"
+ ]
+, "devDependencies":
+ { "ronn": "https://github.com/isaacs/ronnjs/tarball/master" }
+, "engines": { "node": "0.6 || 0.7 || 0.8", "npm": "1" }
+, "scripts": { "test": "./test/run"
+ , "prepublish": "make -j4 doc"
+ , "dumpconf": "env | grep npm | sort | uniq"
+ }
+, "licenses":
+ [ { "type": "MIT +no-false-attribs"
+ , "url": "http://github.com/isaacs/npm/raw/master/LICENSE"
+ }
+ ]
+}
diff --git a/deps/npm/scripts/clean-old.sh b/deps/npm/scripts/clean-old.sh
new file mode 100644
index 0000000000..cda80f2f48
--- /dev/null
+++ b/deps/npm/scripts/clean-old.sh
@@ -0,0 +1,165 @@
+#!/bin/bash
+
+# look for old 0.x cruft, and get rid of it.
+# Should already be sitting in the npm folder.
+
+# This doesn't have to be quite as cross-platform as install.sh.
+# There are some bash-isms, because maintaining *two*
+# fully-portable posix/bourne sh scripts is too much for
+# one project with a sane maintainer.
+
+# If readlink isn't available, then this is just too tricky.
+# However, greadlink is fine, so Solaris can join the party, too.
+readlink="readlink"
+which $readlink >/dev/null 2>/dev/null
+if [ $? -ne 0 ]; then
+ readlink="greadlink"
+ which $readlink >/dev/null 2>/dev/null
+ if [ $? -ne 0 ]; then
+ echo "Can't find the readlink or greadlink command. Aborting."
+ exit 1
+ fi
+fi
+
+if [ "x$npm_config_prefix" != "x" ]; then
+ PREFIXES=$npm_config_prefix
+else
+ node="$NODE"
+ if [ "x$node" = "x" ]; then
+ node=`which node`
+ fi
+ if [ "x$node" = "x" ]; then
+ echo "Can't find node to determine prefix. Aborting."
+ exit 1
+ fi
+
+
+ PREFIX=`dirname $node`
+ PREFIX=`dirname $PREFIX`
+ echo "cleanup prefix=$PREFIX"
+ PREFIXES=$PREFIX
+
+ altprefix=`"$node" -e process.installPrefix`
+ if [ "x$altprefix" != "x" ] && [ "x$altprefix" != "x$PREFIX" ]; then
+ echo "altprefix=$altprefix"
+ PREFIXES="$PREFIX $altprefix"
+ fi
+fi
+
+# now prefix is where npm would be rooted by default
+# go hunting.
+
+packages=
+for prefix in $PREFIXES; do
+ packages="$packages
+ "`ls "$prefix"/lib/node/.npm 2>/dev/null | grep -v .cache`
+done
+
+packages=`echo $packages`
+
+filelist=()
+fid=0
+
+for prefix in $PREFIXES; do
+ # remove any links into the .npm dir, or links to
+ # version-named shims/symlinks.
+ for folder in share/man bin lib/node; do
+ find $prefix/$folder -type l | while read file; do
+ target=`$readlink $file | grep '/\.npm/'`
+ if [ "x$target" != "x" ]; then
+ # found one!
+ filelist[$fid]="$file"
+ let 'fid++'
+ # also remove any symlinks to this file.
+ base=`basename "$file"`
+ base=`echo "$base" | awk -F@ '{print $1}'`
+ if [ "x$base" != "x" ]; then
+          find "`dirname $file`" -type l -name "$base"'*' \
+          | while read l; do
+            target=`$readlink "$l" | grep "$base"`
+            if [ "x$target" != "x" ]; then
+              filelist[$fid]="$l"
+              let 'fid++'
+            fi
+          done
+ fi
+ fi
+ done
+
+ # Scour for shim files. These are relics of 0.2 npm installs.
+ # note: grep -r is not portable.
+ find $prefix/$folder -type f \
+ | xargs grep -sl '// generated by npm' \
+ | while read file; do
+ filelist[$fid]="$file"
+ let 'fid++'
+ done
+ done
+
+ # now remove the package modules, and the .npm folder itself.
+ if [ "x$packages" != "x" ]; then
+ for pkg in $packages; do
+ filelist[$fid]="$prefix/lib/node/$pkg"
+ let 'fid++'
+ for i in $prefix/lib/node/$pkg\@*; do
+ filelist[$fid]="$i"
+ let 'fid++'
+ done
+ done
+ fi
+
+ for folder in lib/node/.npm lib/npm share/npm; do
+ if [ -d $prefix/$folder ]; then
+ filelist[$fid]="$prefix/$folder"
+ let 'fid++'
+ fi
+ done
+done
+
+# now actually clean, but only if there's anything TO clean
+if [ "${#filelist[@]}" -gt 0 ]; then
+ echo ""
+ echo "This script will find and eliminate any shims, symbolic"
+ echo "links, and other cruft that was installed by npm 0.x."
+ echo ""
+
+ if [ "x$packages" != "x" ]; then
+ echo "The following packages appear to have been installed with"
+ echo "an old version of npm, and will be removed forcibly:"
+ for pkg in $packages; do
+ echo " $pkg"
+ done
+ echo "Make a note of these. You may want to install them"
+ echo "with npm 1.0 when this process is completed."
+ echo ""
+ fi
+
+ OK=
+ if [ "x$1" = "x-y" ]; then
+ OK="yes"
+ fi
+
+ while [ "$OK" != "y" ] && [ "$OK" != "yes" ] && [ "$OK" != "no" ]; do
+ echo "Is this OK?"
+ echo " enter 'yes' or 'no'"
+ echo " or 'show' to see a list of files "
+ read OK
+ if [ "x$OK" = "xshow" ] || [ "x$OK" = "xs" ]; then
+ for i in "${filelist[@]}"; do
+ echo "$i"
+ done
+ fi
+ done
+ if [ "$OK" = "no" ]; then
+ echo "Aborting"
+ exit 1
+ fi
+ for i in "${filelist[@]}"; do
+ rm -rf "$i"
+ done
+fi
+
+echo ""
+echo 'All clean!'
+
+exit 0
diff --git a/deps/npm/scripts/doc-build.sh b/deps/npm/scripts/doc-build.sh
new file mode 100755
index 0000000000..6c32ea1838
--- /dev/null
+++ b/deps/npm/scripts/doc-build.sh
@@ -0,0 +1,71 @@
+#!/bin/bash
+
+if [[ $DEBUG != "" ]]; then
+ set -x
+fi
+set -o errexit
+set -o pipefail
+
+if ! [ -x node_modules/.bin/ronn ]; then
+ if [ -f .building_ronn ]; then
+ while [ -f .building_ronn ]; do
+ sleep 1
+ done
+ else
+ # a race to see which make process will be the one to install ronn
+ echo $$ > .building_ronn
+ sleep 1
+ if [ $(cat .building_ronn) == $$ ]; then
+ make node_modules/ronn
+ rm .building_ronn
+ else
+ while [ -f .building_ronn ]; do
+ sleep 1
+ done
+ fi
+ fi
+fi
+
+src=$1
+dest=$2
+name=$(basename ${src%.*})
+date=$(date -u +'%Y-%m-%d %H:%M:%S')
+version=$(node cli.js -v)
+
+mkdir -p $(dirname $dest)
+
+case $dest in
+ *.[13])
+ ./node_modules/.bin/ronn --roff $src \
+ | sed "s|@VERSION@|$version|g" \
+ | perl -pi -e 's/npm\\-([^\(]*)\(1\)/npm help \1/g' \
+ | perl -pi -e 's/npm\\-([^\(]*)\(3\)/npm apihelp \1/g' \
+ | perl -pi -e 's/npm\(1\)/npm help npm/g' \
+ | perl -pi -e 's/npm\(3\)/npm apihelp npm/g' \
+ > $dest
+ exit $?
+ ;;
+ *.html)
+ (cat html/dochead.html && \
+ ./node_modules/.bin/ronn -f $src && \
+ cat html/docfoot.html )\
+ | sed "s|@NAME@|$name|g" \
+ | sed "s|@DATE@|$date|g" \
+ | sed "s|@VERSION@|$version|g" \
+ | perl -pi -e 's/<h1>npm(-?[^\(]*\([0-9]\)) -- (.*?)<\/h1>/<h1>npm\1<\/h1> <p>\2<\/p>/g' \
+ | perl -pi -e 's/npm-npm/npm/g' \
+ | perl -pi -e 's/([^"-])(npm-)?README(\(1\))?/\1<a href="..\/doc\/README.html">README<\/a>/g' \
+ | perl -pi -e 's/<title><a href="..\/doc\/README.html">README<\/a><\/title>/<title>README<\/title>/g' \
+ | perl -pi -e 's/([^"-])npm-([^\(]+)(\(1\))/\1<a href="..\/doc\/\2.html">\2\3<\/a>/g' \
+ | perl -pi -e 's/([^"-])npm-([^\(]+)(\(3\))/\1<a href="..\/api\/\2.html">\2\3<\/a>/g' \
+ | perl -pi -e 's/([^"-])npm\(1\)/\1<a href="..\/doc\/npm.html">npm(1)<\/a>/g' \
+ | perl -pi -e 's/([^"-])npm\(3\)/\1<a href="..\/api\/npm.html">npm(3)<\/a>/g' \
+ | perl -pi -e 's/\([13]\)<\/a><\/h1>/<\/a><\/h1>/g' \
+ > $dest
+ exit $?
+ ;;
+ *)
+ echo "Invalid destination type: $dest" >&2
+ exit 1
+ ;;
+esac
diff --git a/deps/npm/scripts/index-build.js b/deps/npm/scripts/index-build.js
new file mode 100644
index 0000000000..b3c19a03a1
--- /dev/null
+++ b/deps/npm/scripts/index-build.js
@@ -0,0 +1,62 @@
+#!/usr/bin/env node
+var fs = require("fs")
+ , path = require("path")
+ , cli = path.resolve(__dirname, "..", "doc", "cli")
+ , clidocs = null
+ , api = path.resolve(__dirname, "..", "doc", "api")
+ , apidocs = null
+ , readme = path.resolve(__dirname, "..", "README.md")
+
+fs.readdir(cli, done("cli"))
+fs.readdir(api, done("api"))
+
+function done (which) { return function (er, docs) {
+ if (er) throw er
+ if (which === "api") apidocs = docs
+ else clidocs = docs
+
+ if (apidocs && clidocs) next()
+}}
+
+function filter (d) {
+ return d !== "index.md"
+ && d.charAt(0) !== "."
+ && d.match(/\.md$/)
+}
+
+function next () {
+ console.log(
+ "npm-index(1) -- Index of all npm documentation\n" +
+ "==============================================\n")
+
+ apidocs = apidocs.filter(filter).map(function (d) {
+ return [3, path.resolve(api, d)]
+ })
+
+ clidocs = clidocs.filter(filter).map(function (d) {
+ return [1, path.resolve(cli, d)]
+ })
+
+ writeLine([1, readme])
+
+ console.log("# Command Line Documentation")
+
+ clidocs.forEach(writeLine)
+
+ console.log("# API Documentation")
+ apidocs.forEach(writeLine)
+}
+
+function writeLine (sd) {
+ var sxn = sd[0]
+ , doc = sd[1]
+ , d = path.basename(doc, ".md")
+ , s = fs.lstatSync(doc)
+
+ if (s.isSymbolicLink()) return
+
+ var content = fs.readFileSync(doc, "utf8").split("\n")[0].split("--")[1]
+
+ console.log("## npm-%s(%d)\n", d, sxn)
+ console.log(content + "\n")
+}
diff --git a/deps/npm/scripts/install.sh b/deps/npm/scripts/install.sh
new file mode 100644
index 0000000000..16fbfe618b
--- /dev/null
+++ b/deps/npm/scripts/install.sh
@@ -0,0 +1,282 @@
+#!/bin/sh
+
+# A word about this shell script:
+#
+# It must work everywhere, including on systems that lack
+# a /bin/bash, map 'sh' to ksh, ksh97, bash, ash, or zsh,
+# and potentially have either a posix shell or bourne
+# shell living at /bin/sh.
+#
+# See this helpful document on writing portable shell scripts:
+# http://www.gnu.org/s/hello/manual/autoconf/Portable-Shell.html
+#
+# The only shell it won't ever work on is cmd.exe.
+
+if [ "x$0" = "xsh" ]; then
+ # run as curl | sh
+ # on some systems, you can just do cat>npm-install.sh
+ # which is a bit cuter. But on others, &1 is already closed,
+ # so catting to another script file won't do anything.
+ curl -s http://npmjs.org/install.sh > npm-install-$$.sh
+ sh npm-install-$$.sh
+ ret=$?
+ rm npm-install-$$.sh
+ exit $ret
+fi
+
+# See what "npm_config_*" things there are in the env,
+# and make them permanent.
+# If this fails, it's not such a big deal.
+configures="`env | grep 'npm_config_' | sed -e 's|^npm_config_||g'`"
+
+npm_config_loglevel="error"
+if [ "x$npm_debug" = "x" ]; then
+ (exit 0)
+else
+ echo "Running in debug mode."
+ echo "Note that this requires bash or zsh."
+ set -o xtrace
+ set -o pipefail
+ npm_config_loglevel="verbose"
+fi
+export npm_config_loglevel
+
+# make sure that node exists
+node=`which node 2>&1`
+ret=$?
+if [ $ret -eq 0 ] && [ -x "$node" ]; then
+ (exit 0)
+else
+ echo "npm cannot be installed without nodejs." >&2
+ echo "Install node first, and then try again." >&2
+ echo "" >&2
+ echo "Maybe node is installed, but not in the PATH?" >&2
+ echo "Note that running as sudo can change envs." >&2
+ echo ""
+ echo "PATH=$PATH" >&2
+ exit $ret
+fi
+
+# set the temp dir
+TMP="${TMPDIR}"
+if [ "x$TMP" = "x" ]; then
+ TMP="/tmp"
+fi
+TMP="${TMP}/npm.$$"
+rm -rf "$TMP" || true
+mkdir "$TMP"
+if [ $? -ne 0 ]; then
+ echo "failed to mkdir $TMP" >&2
+ exit 1
+fi
+
+BACK="$PWD"
+
+ret=0
+tar="${TAR}"
+if [ -z "$tar" ]; then
+ tar="${npm_config_tar}"
+fi
+if [ -z "$tar" ]; then
+ tar=`which tar 2>&1`
+ ret=$?
+fi
+
+if [ $ret -eq 0 ] && [ -x "$tar" ]; then
+ echo "tar=$tar"
+ echo "version:"
+ $tar --version
+ ret=$?
+fi
+
+if [ $ret -eq 0 ]; then
+ (exit 0)
+else
+ echo "No suitable tar program found."
+ exit 1
+fi
+
+
+
+# Try to find a suitable make
+# If the MAKE environment var is set, use that.
+# otherwise, try to find gmake, and then make.
+# If no make is found, then just execute the necessary commands.
+
+# XXX For some reason, make is building all the docs every time. This
+# is an annoying source of bugs. Figure out why this happens.
+MAKE=NOMAKE
+
+if [ "x$MAKE" = "x" ]; then
+ make=`which gmake 2>&1`
+ if [ $? -eq 0 ] && [ -x $make ]; then
+ (exit 0)
+ else
+ make=`which make 2>&1`
+ if [ $? -eq 0 ] && [ -x $make ]; then
+ (exit 0)
+ else
+ make=NOMAKE
+ fi
+ fi
+else
+ make="$MAKE"
+fi
+
+if [ -x "$make" ]; then
+ (exit 0)
+else
+ # echo "Installing without make. This may fail." >&2
+ make=NOMAKE
+fi
+
+# If there's no bash, then don't even try to clean
+if [ -x "/bin/bash" ]; then
+ (exit 0)
+else
+ clean="no"
+fi
+
+t="${npm_install}"
+if [ -z "$t" ]; then
+ t="latest"
+fi
+
+# the npmca cert
+cacert='
+-----BEGIN CERTIFICATE-----
+MIIChzCCAfACCQDauvz/KHp8ejANBgkqhkiG9w0BAQUFADCBhzELMAkGA1UEBhMC
+VVMxCzAJBgNVBAgTAkNBMRAwDgYDVQQHEwdPYWtsYW5kMQwwCgYDVQQKEwNucG0x
+IjAgBgNVBAsTGW5wbSBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkxDjAMBgNVBAMTBW5w
+bUNBMRcwFQYJKoZIhvcNAQkBFghpQGl6cy5tZTAeFw0xMTA5MDUwMTQ3MTdaFw0y
+MTA5MDIwMTQ3MTdaMIGHMQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEDAOBgNV
+BAcTB09ha2xhbmQxDDAKBgNVBAoTA25wbTEiMCAGA1UECxMZbnBtIENlcnRpZmlj
+YXRlIEF1dGhvcml0eTEOMAwGA1UEAxMFbnBtQ0ExFzAVBgkqhkiG9w0BCQEWCGlA
+aXpzLm1lMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDLI4tIqPpRW+ACw9GE
+OgBlJZwK5f8nnKCLK629Pv5yJpQKs3DENExAyOgDcyaF0HD0zk8zTp+ZsLaNdKOz
+Gn2U181KGprGKAXP6DU6ByOJDWmTlY6+Ad1laYT0m64fERSpHw/hjD3D+iX4aMOl
+y0HdbT5m1ZGh6SJz3ZqxavhHLQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAC4ySDbC
+l7W1WpLmtLGEQ/yuMLUf6Jy/vr+CRp4h+UzL+IQpCv8FfxsYE7dhf/bmWTEupBkv
+yNL18lipt2jSvR3v6oAHAReotvdjqhxddpe5Holns6EQd1/xEZ7sB1YhQKJtvUrl
+ZNufy1Jf1r0ldEGeA+0ISck7s+xSh9rQD2Op
+-----END CERTIFICATE-----
+'
+
+echo "$cacert" > "$TMP/cafile.crt"
+cacert="$TMP/cafile.crt"
+
+# need to echo "" after, because Posix sed doesn't treat EOF
+# as an implied end of line.
+url=`(curl -SsL --cacert "$cacert" https://registry.npmjs.org/npm/$t; echo "") \
+ | sed -e 's/^.*tarball":"//' \
+ | sed -e 's/".*$//'`
+
+ret=$?
+if [ "x$url" = "x" ]; then
+ ret=125
+ # try without the -e arg to sed.
+ url=`(curl -SsL --cacert "$cacert" https://registry.npmjs.org/npm/$t; echo "") \
+ | sed 's/^.*tarball":"//' \
+ | sed 's/".*$//'`
+ ret=$?
+ if [ "x$url" = "x" ]; then
+ ret=125
+ fi
+fi
+if [ $ret -ne 0 ]; then
+ echo "Failed to get tarball url for npm/$t" >&2
+ exit $ret
+fi
+
+
+echo "fetching: $url" >&2
+
+cd "$TMP" \
+ && curl -SsL --cacert "$cacert" "$url" \
+ | $tar -xzf - \
+ && rm "$cacert" \
+ && cd "$TMP"/* \
+ && (node_version=`"$node" --version 2>&1`
+ ret=$?
+ if [ $ret -eq 0 ]; then
+ req=`"$node" bin/read-package-json.js package.json engines.node`
+ if [ -d node_modules ]; then
+ "$node" node_modules/semver/bin/semver -v "$node_version" -r "$req"
+ ret=$?
+ else
+ "$node" bin/semver.js -v "$node_version" -r "$req"
+ ret=$?
+ fi
+ fi
+ if [ $ret -ne 0 ]; then
+ echo "You need node $req to run this program." >&2
+ echo "node --version reports: $node_version" >&2
+ echo "Please upgrade node before continuing."
+ exit $ret
+ fi) \
+ && (ver=`"$node" bin/read-package-json.js package.json version`
+ isnpm10=0
+ if [ $ret -eq 0 ]; then
+ req=`"$node" bin/read-package-json.js package.json engines.node`
+ if [ -d node_modules ]; then
+ if "$node" node_modules/semver/bin/semver -v "$ver" -r "1"
+ then
+ isnpm10=1
+ fi
+ else
+ if "$node" bin/semver -v "$ver" -r ">=1.0"; then
+ isnpm10=1
+ fi
+ fi
+ fi
+
+ ret=0
+ if [ $isnpm10 -eq 1 ] && [ -f "scripts/clean-old.sh" ]; then
+ if [ "x$skipclean" = "x" ]; then
+ (exit 0)
+ else
+ clean=no
+ fi
+ if [ "x$clean" = "xno" ] \
+ || [ "x$clean" = "xn" ]; then
+ echo "Skipping 0.x cruft clean" >&2
+ ret=0
+ elif [ "x$clean" = "xy" ] || [ "x$clean" = "xyes" ]; then
+ NODE="$node" /bin/bash "scripts/clean-old.sh" "-y"
+ ret=$?
+ else
+ NODE="$node" /bin/bash "scripts/clean-old.sh" </dev/tty
+ ret=$?
+ fi
+ fi
+
+ if [ $ret -ne 0 ]; then
+ echo "Aborted 0.x cleanup. Exiting." >&2
+ exit $ret
+ fi) \
+ && (if [ "x$configures" = "x" ]; then
+ (exit 0)
+ else
+ echo "./configure "$configures
+ echo "$configures" > npmrc
+ fi) \
+ && (if [ "$make" = "NOMAKE" ]; then
+ (exit 0)
+ elif "$make" uninstall install; then
+ (exit 0)
+ else
+ make="NOMAKE"
+ fi
+ if [ "$make" = "NOMAKE" ]; then
+ "$node" cli.js rm npm -gf
+ "$node" cli.js install -gf
+ fi) \
+ && cd "$BACK" \
+ && rm -rf "$TMP" \
+ && echo "It worked"
+
+ret=$?
+if [ $ret -ne 0 ]; then
+ echo "It failed" >&2
+fi
+exit $ret
diff --git a/deps/npm/test/common.js b/deps/npm/test/common.js
new file mode 100644
index 0000000000..2755056b1b
--- /dev/null
+++ b/deps/npm/test/common.js
@@ -0,0 +1,7 @@
+
+// whatever, it's just tests.
+;["util","assert"].forEach(function (thing) {
+ thing = require("thing")
+ for (var i in thing) global[i] = thing[i]
+}
+
diff --git a/deps/npm/test/disabled/bundlerecurs/package.json b/deps/npm/test/disabled/bundlerecurs/package.json
new file mode 100644
index 0000000000..d870411706
--- /dev/null
+++ b/deps/npm/test/disabled/bundlerecurs/package.json
@@ -0,0 +1,4 @@
+{ "name" : "bundletest"
+, "version" : "1.0.0"
+, "dependencies" : { "bundletest" : "*" }
+}
diff --git a/deps/npm/test/disabled/failer/package.json b/deps/npm/test/disabled/failer/package.json
new file mode 100644
index 0000000000..e1f8e946b7
--- /dev/null
+++ b/deps/npm/test/disabled/failer/package.json
@@ -0,0 +1,5 @@
+{ "name" : "failer"
+, "version" : "9999.999.99"
+, "dependencies" : { "base64" : "*" }
+, "scripts" : { "preinstall" : "exit 1" }
+}
diff --git a/deps/npm/test/disabled/fast/package.json b/deps/npm/test/disabled/fast/package.json
new file mode 100644
index 0000000000..fbf26e9b14
--- /dev/null
+++ b/deps/npm/test/disabled/fast/package.json
@@ -0,0 +1,9 @@
+{ "name" : "fast"
+, "description" : "does nothing, and not very fast"
+, "version" : "1.2.3"
+, "scripts" :
+{ "preinstall" : "sleep 1 && echo fast 1 $(date +%s) && echo fast 2"
+, "install" : "sleep 1 && echo fast 2 $(date +%s) && echo fast 3"
+, "postinstall" : "sleep 1 && echo fast 3 $(date +%s) && echo fast 4"
+}
+}
diff --git a/deps/npm/test/disabled/package-config/package.json b/deps/npm/test/disabled/package-config/package.json
new file mode 100644
index 0000000000..7ec97d3805
--- /dev/null
+++ b/deps/npm/test/disabled/package-config/package.json
@@ -0,0 +1,4 @@
+{"name":"package-config"
+,"version":"1.2.3"
+,"config":{"foo":"bar"}
+,"scripts":{"test":"./test.js"}}
diff --git a/deps/npm/test/disabled/package-config/test.js b/deps/npm/test/disabled/package-config/test.js
new file mode 100755
index 0000000000..7337b237b5
--- /dev/null
+++ b/deps/npm/test/disabled/package-config/test.js
@@ -0,0 +1,17 @@
+#!/usr/bin/env node
+
+var env = process.env
+ , orig = require(process.env.npm_package_name+"/package.json").config
+ , assert = require("assert")
+
+console.log("Before running this test, do:\n"
+ +" npm config set package-config:foo boo\n"
+ +"or else it's about to fail.")
+assert.equal(env.npm_package_config_foo, "boo", "foo != boo")
+assert.equal(orig.foo, "bar", "original foo != bar")
+assert.equal(env["npm_config_package-config:foo"], "boo",
+ "package-config:foo != boo")
+console.log({ foo: env.npm_package_config_foo
+ , orig_foo: orig.foo
+ , "package-config:foo": env["npm_config_package-config:foo"]
+ })
diff --git a/deps/npm/test/disabled/slow/package.json b/deps/npm/test/disabled/slow/package.json
new file mode 100644
index 0000000000..ba6be42fa9
--- /dev/null
+++ b/deps/npm/test/disabled/slow/package.json
@@ -0,0 +1,9 @@
+{ "name" : "slow"
+, "description" : "just like fast, but even slower"
+, "version" : "1.2.3"
+, "scripts" :
+ { "preinstall" : "sleep 1 && echo slow 1 $(date +%s) && sleep 1 && echo slow 2 $(date +%s)"
+ , "install" : "sleep 1 && echo slow 2 $(date +%s) && sleep 1 && echo slow 3 $(date +%s)"
+ , "postinstall" : "sleep 1 && echo slow 3 $(date +%s) && sleep 1 && echo slow 4 $(date +%s)"
+ }
+}
diff --git a/deps/npm/test/disabled/startstop/package.json b/deps/npm/test/disabled/startstop/package.json
new file mode 100644
index 0000000000..bee2a2fd3a
--- /dev/null
+++ b/deps/npm/test/disabled/startstop/package.json
@@ -0,0 +1,3 @@
+{"name":"startstop"
+,"version":"1.2.3"
+,"scripts":{"start":"echo 'start'","stop":"echo 'stop'"}}
diff --git a/deps/npm/test/packages/npm-test-blerg/package.json b/deps/npm/test/packages/npm-test-blerg/package.json
new file mode 100644
index 0000000000..374b4432b2
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-blerg/package.json
@@ -0,0 +1,4 @@
+{ "name":"npm-test-blerg"
+, "version" : "0.0.0"
+, "scripts" : { "test" : "node test.js" }
+}
diff --git a/deps/npm/test/packages/npm-test-blerg/test.js b/deps/npm/test/packages/npm-test-blerg/test.js
new file mode 100644
index 0000000000..f548458ac0
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-blerg/test.js
@@ -0,0 +1,5 @@
+
+var assert = require("assert")
+assert.equal(undefined, process.env.npm_config__password, "password exposed!")
+assert.equal(undefined, process.env.npm_config__auth, "auth exposed!")
+assert.equal(undefined, process.env.npm_config__authCrypt, "authCrypt exposed!")
diff --git a/deps/npm/test/packages/npm-test-env-reader/package.json b/deps/npm/test/packages/npm-test-env-reader/package.json
new file mode 100644
index 0000000000..ddd5c91abd
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-env-reader/package.json
@@ -0,0 +1,14 @@
+{ "name":"npm-test-env-reader"
+, "version" : "1.2.3"
+, "scripts" :
+ { "install" : "./test.sh"
+ , "preinstall" : "./test.sh"
+ , "preuninstall" : "./test.sh"
+ , "postuninstall" : "./test.sh"
+ , "test" : "./test.sh"
+ , "stop" : "./test.sh"
+ , "start" : "./test.sh"
+ , "restart" : "./test.sh"
+ , "foo" : "./test.sh"
+ }
+}
diff --git a/deps/npm/test/packages/npm-test-env-reader/test.sh b/deps/npm/test/packages/npm-test-env-reader/test.sh
new file mode 100755
index 0000000000..b4ca4374ed
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-env-reader/test.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env sh
+env | grep npm | sort | uniq
+echo PATH=$PATH
diff --git a/deps/npm/test/packages/npm-test-missing-bindir/package.json b/deps/npm/test/packages/npm-test-missing-bindir/package.json
new file mode 100644
index 0000000000..49e26742df
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-missing-bindir/package.json
@@ -0,0 +1,4 @@
+{ "name":"npm-test-missing-bindir"
+, "version" : "0.0.0"
+, "scripts" : { "test" : "node test.js" }
+, "directories": { "bin" : "./not-found" } }
diff --git a/deps/npm/test/packages/npm-test-missing-bindir/test.js b/deps/npm/test/packages/npm-test-missing-bindir/test.js
new file mode 100644
index 0000000000..f548458ac0
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-missing-bindir/test.js
@@ -0,0 +1,5 @@
+
+var assert = require("assert")
+assert.equal(undefined, process.env.npm_config__password, "password exposed!")
+assert.equal(undefined, process.env.npm_config__auth, "auth exposed!")
+assert.equal(undefined, process.env.npm_config__authCrypt, "authCrypt exposed!")
diff --git a/deps/npm/test/packages/npm-test-private/package.json b/deps/npm/test/packages/npm-test-private/package.json
new file mode 100644
index 0000000000..3d95a37af1
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-private/package.json
@@ -0,0 +1,4 @@
+{"name":"npm-test-private"
+,"version":"9.9.9-9"
+,"homepage":"http://www.youtube.com/watch?v=1MLry6Cn_D4"
+,"private":"true"}
diff --git a/deps/npm/test/packages/npm-test-test-package/package.json b/deps/npm/test/packages/npm-test-test-package/package.json
new file mode 100644
index 0000000000..c5c5aeabc1
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-test-package/package.json
@@ -0,0 +1,5 @@
+{ "name":"npm-test-test-package"
+, "author" : "Testy McMock"
+, "version" : "1.2.3-99-b"
+, "description" : "This is a test package used for debugging. It has some random data and that's all."
+}
diff --git a/deps/npm/test/packages/npm-test-url-dep/package.json b/deps/npm/test/packages/npm-test-url-dep/package.json
new file mode 100644
index 0000000000..72a139e547
--- /dev/null
+++ b/deps/npm/test/packages/npm-test-url-dep/package.json
@@ -0,0 +1,4 @@
+{ "name":"npm-test-url-dep"
+, "version" : "1.2.3"
+, "dependencies" :
+ { "dnode" : "https://github.com/substack/dnode/tarball/master" } }
diff --git a/deps/npm/test/run b/deps/npm/test/run
new file mode 100755
index 0000000000..2443726db8
--- /dev/null
+++ b/deps/npm/test/run
@@ -0,0 +1,138 @@
+#!/bin/bash
+
+if [ "$DEBUG" != "" ]; then
+ set -x
+fi
+
+
+# the "npm" command is set to a custom function here so that we can
+# test the code in this repo, rather than whichever version of npm
+# happens to be installed.
+
+main () {
+ # setup
+ FAILURES=0
+
+ cd "$TESTDIR"
+
+ npm config ls
+
+ # install
+ npm install "$NPMPKG" || exit 1
+
+ # used in test later
+ npm config set package-config:foo boo || exit 1
+
+ npm install $( ls packages | awk '{print "packages/" $1 }' ) || exit 1
+ (ls packages | while read pkg; do
+ npm test "$pkg"
+ done) || exit 1
+ if [ "$FAILURES" == "0" ]; then
+ npm rm $(ls packages) npm || exit 1
+ fi
+ cleanup
+
+ if ! [ "$npm_package_config_publishtest" == "true" ]; then
+ echo_err "To test publishing: npm config set npm:publishtest true"
+ else
+ # attempt to publish and unpublish each of them.
+ npm install "$NPMPKG" || exit 1
+
+ (ls packages | grep -v 'npm-test-private' | while read pkg; do
+ npm publish packages/$pkg || exit 1
+ npm install $pkg || exit 1
+ npm unpublish $pkg || exit 1
+ done) || exit 1
+
+ # verify that the private package can't be published
+ # bypass the test-harness npm function.
+ "$NPMCLI" publish packages/npm-test-private && (
+ npm unpublish npm-test-private
+ exit 1000
+ )
+ if [ $? -eq 1000 ]; then
+ fail "Private package shouldn't be publishable" >&2
+ fi
+
+ if [ "$FAILURES" == "0" ]; then
+ npm rm $(ls packages) npm || exit 1
+ fi
+ cleanup
+
+ fi
+
+ if [ $FAILURES -eq 0 ]; then
+ echo_err "ok"
+ rm -rf $TMP
+ else
+ echo_err "FAILED: $FAILURES"
+ fi
+ exit $FAILURES
+}
+
+
+
+####################
+# Test Harness below
+
+# fake functions
+npm () {
+ echo -e "npm $@"
+ "$NPMCLI" "$@" \
+ || fail npm "$@"
+}
+
+# get the absolute path of the executable
+SELF_PATH="$0"
+if [ "${SELF_PATH:0:1}" != "." ] && [ "${SELF_PATH:0:1}" != "/" ]; then
+ SELF_PATH=./"$SELF_PATH"
+fi
+SELF_PATH=$( cd -P -- "$(dirname -- "$SELF_PATH")" \
+ && pwd -P \
+ ) && SELF_PATH=$SELF_PATH/$(basename -- "$0")
+# resolve symlinks
+while [ -h "$SELF_PATH" ]; do
+ DIR=$(dirname -- "$SELF_PATH")
+ SYM=$(readlink -- "$SELF_PATH")
+ SELF_PATH=$( cd -- "$DIR" \
+ && cd -- $(dirname -- "$SYM") \
+ && pwd \
+ )/$(basename -- "$SYM")
+done
+NPMPKG="$(dirname -- "$(dirname -- "$SELF_PATH")")"
+NPMCLI="$NPMPKG/cli.js"
+TESTDIR="$NPMPKG/test/"
+TMP=${TMPDIR:-/tmp}
+rm -rf $TMP/npm*
+TMP=$TMP/npm-test-$$
+echo "Testing in $TMP ..."
+ROOTDIR="$TMP/root"
+
+cleanup () {
+ if [ "$FAILURES" != "0" ] && [ "$FAILURES" != "" ]; then
+ return
+ fi
+ [ -d "$ROOTDIR" ] && rm -rf -- "$ROOTDIR"
+ mkdir -p -- "$ROOTDIR"
+}
+
+export npm_config_prefix="$ROOTDIR"
+export npm_config_color="always"
+export npm_config_global=true
+# have to set this to false, or it'll try to test itself forever
+export npm_config_npat=false
+export PATH="$PATH":"$ROOTDIR/bin":"$ROOTDIR/node_modules/.bin"
+export NODE_PATH="$ROOTDIR/node_modules"
+
+echo_err () {
+ echo "$@" >&2
+}
+fail () {
+ let 'FAILURES += 1'
+ echo_err ""
+ echo_err -e "\033[33mFailure: $@\033[m"
+ exit 1
+}
+
+cleanup
+main
diff --git a/deps/npm/test/update-test.sh b/deps/npm/test/update-test.sh
new file mode 100755
index 0000000000..f72c90dd98
--- /dev/null
+++ b/deps/npm/test/update-test.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+SELF_PATH="$0"
+if [ "${SELF_PATH:0:1}" != "." ] && [ "${SELF_PATH:0:1}" != "/" ]; then
+ SELF_PATH=./"$SELF_PATH"
+fi
+SELF_PATH=$( cd -P -- "$(dirname -- "$SELF_PATH")" \
+ && pwd -P \
+ ) && SELF_PATH=$SELF_PATH/$(basename -- "$0")
+
+# resolve symlinks
+while [ -h "$SELF_PATH" ]; do
+ DIR=$(dirname -- "$SELF_PATH")
+ SYM=$(readlink -- "$SELF_PATH")
+ SELF_PATH=$( cd -- "$DIR" \
+ && cd -- $(dirname -- "$SYM") \
+ && pwd \
+ )/$(basename -- "$SYM")
+done
+DIR=$( dirname -- "$SELF_PATH" )
+
+export npm_config_root=$DIR/root
+export npm_config_binroot=$DIR/bin
+
+rm -rf $DIR/{root,bin}
+mkdir -p $DIR/root
+mkdir -p $DIR/bin
+npm ls installed 2>/dev/null | grep -v npm | awk '{print $1}' | xargs npm rm &>/dev/null
+npm install \
+ base64@1.0.0 \
+ eyes@0.1.1 \
+ vows@0.2.5 \
+ websocket-server@1.0.5 &>/dev/null
+npm install ./test/packages/blerg &>/dev/null
+npm install vows@0.3.0 &>/dev/null
+
+echo ""
+echo "##"
+echo "## starting update"
+echo "##"
+echo ""
+
+npm update
+
+echo ""
+echo "##"
+echo "## update done, all should be 'latest'"
+echo "##"
+echo ""
+
+list=$( npm ls installed remote 2>/dev/null )
+echo "$list"
+notlatest=$( echo "$list" | grep -v latest )
+if [ "$notlatest" != "" ]; then
+ echo "Failed: not latest"
+ echo $notlatest
+else
+ echo "ok"
+fi
diff --git a/tools/osx-pkg-postinstall.sh b/tools/osx-pkg-postinstall.sh
new file mode 100644
index 0000000000..c4c872fc78
--- /dev/null
+++ b/tools/osx-pkg-postinstall.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# TODO Can this be done inside the .pmdoc?
+# TODO Can we extract $PREFIX from the installer?
+cd /usr/local/bin
+ln -sf ../lib/node_modules/npm/bin/npm-cli.js npm
diff --git a/tools/osx-pkg.pmdoc/01local-contents.xml b/tools/osx-pkg.pmdoc/01local-contents.xml
new file mode 100644
index 0000000000..bc1e5a791f
--- /dev/null
+++ b/tools/osx-pkg.pmdoc/01local-contents.xml
@@ -0,0 +1 @@
+<pkg-contents spec="1.12"/> \ No newline at end of file
diff --git a/tools/osx-pkg.pmdoc/01local.xml b/tools/osx-pkg.pmdoc/01local.xml
index a4b48a818b..18fd871248 100644
--- a/tools/osx-pkg.pmdoc/01local.xml
+++ b/tools/osx-pkg.pmdoc/01local.xml
@@ -1 +1 @@
-<pkgref spec="1.12" uuid="053587FE-BDF3-4EF5-815D-281427431048"><config><identifier>org.nodejs.pkg</identifier><version>1.0</version><description></description><post-install type="none"/><requireAuthorization/><installFrom relative="true" mod="true">../out/dist-osx/usr/local/</installFrom><installTo mod="true" relocatable="true">/usr/local</installTo><flags><followSymbolicLinks/></flags><packageStore type="internal"></packageStore><mod>installTo.isRelativeType</mod><mod>installTo</mod><mod>locationType</mod><mod>relocatable</mod><mod>installFrom.path</mod><mod>installTo.isAbsoluteType</mod><mod>identifier</mod><mod>parent</mod><mod>installTo.path</mod><mod>installFrom.isRelativeType</mod></config><contents><file-list>01local-contents.xml</file-list><filter>/CVS$</filter><filter>/\.svn$</filter><filter>/\.cvsignore$</filter><filter>/\.cvspass$</filter><filter>/\.DS_Store$</filter></contents></pkgref> \ No newline at end of file
+<pkgref spec="1.12" uuid="053587FE-BDF3-4EF5-815D-281427431048"><config><identifier>org.nodejs.pkg</identifier><version>1.0</version><description></description><post-install type="none"/><requireAuthorization/><installFrom relative="true" mod="true">../out/dist-osx/usr/local/</installFrom><installTo mod="true" relocatable="true">/usr/local</installTo><flags><followSymbolicLinks/></flags><packageStore type="internal"></packageStore><mod>installTo.isRelativeType</mod><mod>installTo</mod><mod>locationType</mod><mod>relocatable</mod><mod>installFrom.path</mod><mod>installTo.isAbsoluteType</mod><mod>identifier</mod><mod>parent</mod><mod>installTo.path</mod><mod>installFrom.isRelativeType</mod></config></pkgref> \ No newline at end of file
diff --git a/tools/osx-pkg.pmdoc/02npm-contents.xml b/tools/osx-pkg.pmdoc/02npm-contents.xml
new file mode 100644
index 0000000000..bc1e5a791f
--- /dev/null
+++ b/tools/osx-pkg.pmdoc/02npm-contents.xml
@@ -0,0 +1 @@
+<pkg-contents spec="1.12"/> \ No newline at end of file
diff --git a/tools/osx-pkg.pmdoc/02npm.xml b/tools/osx-pkg.pmdoc/02npm.xml
new file mode 100644
index 0000000000..f97de66e06
--- /dev/null
+++ b/tools/osx-pkg.pmdoc/02npm.xml
@@ -0,0 +1 @@
+<pkgref spec="1.12" uuid="DF0233A3-6B5D-4FBF-8048-8FC57F42278F"><config><identifier>org.nodejs.node.npm.pkg</identifier><version>1.0</version><description></description><post-install type="none"/><requireAuthorization/><installFrom relative="true">../deps/npm</installFrom><installTo mod="true">/usr/local/lib/node_modules/npm</installTo><flags><followSymbolicLinks/></flags><packageStore type="internal"></packageStore><mod>installTo.path</mod><mod>installFrom.isRelativeType</mod><mod>installTo.isAbsoluteType</mod><mod>scripts.postinstall.isRelativeType</mod><mod>parent</mod><mod>installTo</mod></config><scripts><postinstall relative="true" mod="true">osx-pkg-postinstall.sh</postinstall></scripts></pkgref> \ No newline at end of file
diff --git a/tools/osx-pkg.pmdoc/index.xml b/tools/osx-pkg.pmdoc/index.xml
index 56b96bd2e4..d805747fcf 100644
--- a/tools/osx-pkg.pmdoc/index.xml
+++ b/tools/osx-pkg.pmdoc/index.xml
@@ -1,9 +1,9 @@
-<pkmkdoc spec="1.12"><properties><title>Node</title><build>/Users/ryan/Desktop/Node.pkg</build><organization>org.nodejs</organization><userSees ui="easy"/><min-target os="3"/><domain system="true"/></properties><distribution><versions min-spec="1.000000"/><scripts></scripts></distribution><contents><choice title="node" id="choice3" starts_selected="true" starts_enabled="true" starts_hidden="false"><pkgref id="org.nodejs.pkg"/></choice></contents><resources bg-scale="none" bg-align="topleft"><locale lang="en"><resource relative="true" mod="true" type="background">../doc/mac_osx_nodejs_installer_logo.png</resource><resource mime-type="text/rtf" kind="embedded" type="welcome"><![CDATA[{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
+<pkmkdoc spec="1.12"><properties><title>Node</title><build>/Users/ryan/Desktop/Node.pkg</build><organization>org.nodejs</organization><userSees ui="both"/><min-target os="3"/><domain system="true"/></properties><distribution><versions min-spec="1.000000"/><scripts></scripts></distribution><contents><choice title="node" id="choice3" starts_selected="true" starts_enabled="true" starts_hidden="false"><pkgref id="org.nodejs.pkg"/></choice><choice title="npm" id="choice4" starts_selected="true" starts_enabled="true" starts_hidden="false"><pkgref id="org.nodejs.node.npm.pkg"/></choice></contents><resources bg-scale="none" bg-align="topleft"><locale lang="en"><resource relative="true" mod="true" type="background">../doc/mac_osx_nodejs_installer_logo.png</resource><resource mime-type="text/rtf" kind="embedded" type="welcome"><![CDATA[{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
{\fonttbl\f0\fnil\fcharset0 LucidaGrande;}
{\colortbl;\red255\green255\blue255;}
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural\pardirnatural
-\f0\fs26 \cf0 This package will install node and node-waf into /usr/local/bin}]]></resource><resource mime-type="text/rtf" kind="embedded" type="conclusion"><![CDATA[{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
+\f0\fs26 \cf0 This package will install node and npm into /usr/local/bin}]]></resource><resource mime-type="text/rtf" kind="embedded" type="conclusion"><![CDATA[{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf360
{\fonttbl\f0\fnil\fcharset0 LucidaGrande;}
{\colortbl;\red255\green255\blue255;}
\pard\tx560\tx1120\tx1680\tx2240\tx2800\tx3360\tx3920\tx4480\tx5040\tx5600\tx6160\tx6720\ql\qnatural\pardirnatural
@@ -12,4 +12,8 @@
\
/usr/local/bin/node\
\
-Make sure that /usr/local/bin is in your path.}]]></resource></locale></resources><flags/><item type="file">01local.xml</item><mod>properties.title</mod><mod>properties.userDomain</mod><mod>properties.anywhereDomain</mod><mod>properties.systemDomain</mod></pkmkdoc> \ No newline at end of file
+NPM was installed at\
+\
+ /usr/local/bin/npm\
+\
+Make sure that /usr/local/bin is in your $PATH.}]]></resource></locale></resources><flags/><item type="file">01local.xml</item><item type="file">02npm.xml</item><mod>properties.title</mod><mod>properties.userDomain</mod><mod>properties.anywhereDomain</mod><mod>properties.systemDomain</mod></pkmkdoc> \ No newline at end of file