Diffstat (limited to 'deps/npm/node_modules/tar/node_modules')
 deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js | 70 --
 deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream.js       | 68 --
 deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js      | 70 --
 deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper.js            | 68 --
 deps/npm/node_modules/tar/node_modules/block-stream/package.json                | 86 ++--
 deps/npm/node_modules/tar/node_modules/block-stream/test/basic.js               | 27 --
 deps/npm/node_modules/tar/node_modules/block-stream/test/nopad-thorough.js      | 68 --
 deps/npm/node_modules/tar/node_modules/block-stream/test/nopad.js               | 57 --
 deps/npm/node_modules/tar/node_modules/block-stream/test/pause-resume.js        | 73 --
 deps/npm/node_modules/tar/node_modules/block-stream/test/thorough.js            | 68 --
 deps/npm/node_modules/tar/node_modules/block-stream/test/two-stream.js          | 59 --
 11 files changed, 47 insertions(+), 667 deletions(-)
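
Summary of the change below: the bundled block-stream dependency moves from 0.0.8 to 0.0.9. Its package.json is rewritten with the "_"-prefixed bookkeeping fields that npm 5-era clients record for installed packages, and the 0.0.9 tarball no longer ships the bench/ and test/ directories; their wholesale deletion accounts for 628 of the 667 removed lines.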
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js b/deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js
deleted file mode 100644
index 9328844aa6..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream-pause.js
+++ /dev/null
@@ -1,70 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- f.pause()
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- f.resume()
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
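
For context on what is being dropped: the four bench files (two of which pull in the separate dropper package for comparison) all share one pattern. They write writeCount pairs of "a"- and "b"-filled buffers into a stream, compare the emitted chunk and byte totals against the block math, and later scribble "x" over every emitted chunk so that any internal buffer reuse trips the deferred "should not change data" check. A minimal sketch of that reuse check, using the block-stream API shown above (Buffer.from stands in for the long-deprecated new Buffer):

var BlockStream = require("block-stream")

var f = new BlockStream(16, { nopad: true })

f.on("data", function (c) {
  var before = c.toString()
  // defer the comparison, as the benches do, so any later mutation of
  // this Buffer by the stream (i.e. internal buffer reuse) shows up
  setTimeout(function () {
    if (c.toString() !== before) throw new Error("emitted chunk was mutated")
    // now scribble on the chunk ourselves; if the stream ever hands this
    // same Buffer out again, the next deferred comparison catches it
    for (var i = 0; i < c.length; i++) c[i] = "x".charCodeAt(0)
  }, 100)
})

f.write(Buffer.from("a".repeat(8)))
f.write(Buffer.from("b".repeat(8)))
f.end()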
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream.js b/deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream.js
deleted file mode 100644
index 1141f3a84c..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/bench/block-stream.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js b/deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js
deleted file mode 100644
index 93e4068eea..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper-pause.js
+++ /dev/null
@@ -1,70 +0,0 @@
-var BlockStream = require("dropper")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- f.pause()
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- f.resume()
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper.js b/deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper.js
deleted file mode 100644
index 55fa133054..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/bench/dropper.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("dropper")
-
-var blockSizes = [16, 25, 1024]
- , writeSizes = [4, 8, 15, 16, 17, 64, 100]
- , writeCounts = [1, 10, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/package.json b/deps/npm/node_modules/tar/node_modules/block-stream/package.json
index 97d9d42aba..045ca8d243 100644
--- a/deps/npm/node_modules/tar/node_modules/block-stream/package.json
+++ b/deps/npm/node_modules/tar/node_modules/block-stream/package.json
@@ -1,55 +1,63 @@
 {
+  "_from": "block-stream@*",
+  "_id": "block-stream@0.0.9",
+  "_integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=",
+  "_location": "/tar/block-stream",
+  "_phantomChildren": {},
+  "_requested": {
+    "type": "range",
+    "registry": true,
+    "raw": "block-stream@*",
+    "name": "block-stream",
+    "escapedName": "block-stream",
+    "rawSpec": "*",
+    "saveSpec": null,
+    "fetchSpec": "*"
+  },
+  "_requiredBy": [
+    "/tar"
+  ],
+  "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz",
+  "_shasum": "13ebfe778a03205cfe03751481ebb4b3300c126a",
+  "_shrinkwrap": null,
+  "_spec": "block-stream@*",
+  "_where": "/Users/zkat/Documents/code/npm/node_modules/tar",
   "author": {
     "name": "Isaac Z. Schlueter",
     "email": "i@izs.me",
     "url": "http://blog.izs.me/"
   },
-  "name": "block-stream",
-  "description": "a stream of blocks",
-  "version": "0.0.8",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/block-stream.git"
-  },
-  "engines": {
-    "node": "0.4 || >=0.5.8"
+  "bin": null,
+  "bugs": {
+    "url": "https://github.com/isaacs/block-stream/issues"
   },
-  "main": "block-stream.js",
+  "bundleDependencies": false,
   "dependencies": {
     "inherits": "~2.0.0"
   },
+  "deprecated": false,
+  "description": "a stream of blocks",
   "devDependencies": {
-    "tap": "0.x"
+    "tap": "^5.7.1"
   },
-  "scripts": {
-    "test": "tap test/"
+  "engines": {
+    "node": "0.4 || >=0.5.8"
   },
+  "files": [
+    "block-stream.js"
+  ],
+  "homepage": "https://github.com/isaacs/block-stream#readme",
   "license": "ISC",
-  "gitHead": "b35520314f4763af0788d65a846bb43d9c0a8f02",
-  "bugs": {
-    "url": "https://github.com/isaacs/block-stream/issues"
+  "main": "block-stream.js",
+  "name": "block-stream",
+  "optionalDependencies": {},
+  "peerDependencies": {},
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/block-stream.git"
   },
-  "homepage": "https://github.com/isaacs/block-stream#readme",
-  "_id": "block-stream@0.0.8",
-  "_shasum": "0688f46da2bbf9cff0c4f68225a0cb95cbe8a46b",
-  "_from": "block-stream@*",
-  "_npmVersion": "2.10.0",
-  "_nodeVersion": "2.0.1",
-  "_npmUser": {
-    "name": "isaacs",
-    "email": "isaacs@npmjs.com"
-  },
-  "dist": {
-    "shasum": "0688f46da2bbf9cff0c4f68225a0cb95cbe8a46b",
-    "tarball": "http://registry.npmjs.org/block-stream/-/block-stream-0.0.8.tgz"
-  },
-  "maintainers": [
-    {
-      "name": "isaacs",
-      "email": "i@izs.me"
-    }
-  ],
-  "directories": {},
-  "_resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.8.tgz",
-  "readme": "ERROR: No README data found!"
+  "scripts": {
+    "test": "tap test/*.js --cov"
+  },
+  "version": "0.0.9"
 }
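
Beyond the "_"-metadata churn, the package.json change does two substantive things: the test script now runs tap test/*.js --cov against tap ^5.7.1 instead of tap 0.x, and the new "files" array whitelists block-stream.js as the only payload npm will pack (besides the handful of files npm always includes, such as package.json and the README). That whitelist, rather than any explicit deletion upstream, is presumably why the bench/ and test/ directories vanish from the installed tree below.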
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/test/basic.js b/deps/npm/node_modules/tar/node_modules/block-stream/test/basic.js
deleted file mode 100644
index b4b930511e..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/test/basic.js
+++ /dev/null
@@ -1,27 +0,0 @@
-var tap = require("tap")
- , BlockStream = require("../block-stream.js")
-
-tap.test("basic test", function (t) {
- var b = new BlockStream(16)
- var fs = require("fs")
- var fstr = fs.createReadStream(__filename, {encoding: "utf8"})
- fstr.pipe(b)
-
- var stat
- t.doesNotThrow(function () {
- stat = fs.statSync(__filename)
- }, "stat should not throw")
-
- var totalBytes = 0
- b.on("data", function (c) {
- t.equal(c.length, 16, "chunks should be 16 bytes long")
- t.type(c, Buffer, "chunks should be buffer objects")
- totalBytes += c.length
- })
- b.on("end", function () {
- var expectedBytes = stat.size + (16 - stat.size % 16)
- t.equal(totalBytes, expectedBytes, "Should be multiple of 16")
- t.end()
- })
-
-})
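
The removed basic test streams this very file through a 16-byte BlockStream in the default padding mode, where output is zero-filled up to a block boundary. Note its expectation, stat.size + (16 - stat.size % 16), matches the usual round-up only when the size is not already a multiple of 16; thorough.js below states the general form, expectChunks * blockSize. As a worked check of that arithmetic:

// padded mode: output is the input rounded up to a whole number of blocks
function paddedBytes (inputBytes, blockSize) {
  return Math.ceil(inputBytes / blockSize) * blockSize
}

console.log(paddedBytes(1000, 16)) // 1008 -> 63 chunks of 16 bytes
console.log(paddedBytes(1024, 16)) // 1024 -> already block-aligned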
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/test/nopad-thorough.js b/deps/npm/node_modules/tar/node_modules/block-stream/test/nopad-thorough.js
deleted file mode 100644
index 7a8de88b5b..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/test/nopad-thorough.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16]//, 25]//, 1024]
- , writeSizes = [4, 15, 16, 17, 64 ]//, 64, 100]
- , writeCounts = [1, 10]//, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize, {nopad: true })
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = writeSize * writeCount * 2
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/test/nopad.js b/deps/npm/node_modules/tar/node_modules/block-stream/test/nopad.js
deleted file mode 100644
index 6d38429fbc..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/test/nopad.js
+++ /dev/null
@@ -1,57 +0,0 @@
-var BlockStream = require("../")
-var tap = require("tap")
-
-
-tap.test("don't pad, small writes", function (t) {
- var f = new BlockStream(16, { nopad: true })
- t.plan(1)
-
- f.on("data", function (c) {
- t.equal(c.toString(), "abc", "should get 'abc'")
- })
-
- f.on("end", function () { t.end() })
-
- f.write(new Buffer("a"))
- f.write(new Buffer("b"))
- f.write(new Buffer("c"))
- f.end()
-})
-
-tap.test("don't pad, exact write", function (t) {
- var f = new BlockStream(16, { nopad: true })
- t.plan(1)
-
- var first = true
- f.on("data", function (c) {
- if (first) {
- first = false
- t.equal(c.toString(), "abcdefghijklmnop", "first chunk")
- } else {
- t.fail("should only get one")
- }
- })
-
- f.on("end", function () { t.end() })
-
- f.end(new Buffer("abcdefghijklmnop"))
-})
-
-tap.test("don't pad, big write", function (t) {
- var f = new BlockStream(16, { nopad: true })
- t.plan(2)
-
- var first = true
- f.on("data", function (c) {
- if (first) {
- first = false
- t.equal(c.toString(), "abcdefghijklmnop", "first chunk")
- } else {
- t.equal(c.toString(), "q")
- }
- })
-
- f.on("end", function () { t.end() })
-
- f.end(new Buffer("abcdefghijklmnopq"))
-})
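
The nopad tests above pin down that mode's contract: input is re-cut into chunks of at most blockSize bytes, nothing is zero-filled, and bytes out equal bytes in. (tar, the parent package here, blocks archives into fixed 512-byte records, which is presumably why both modes exist.) A side-by-side sketch, assuming the same API the deleted tests use:

var BlockStream = require("block-stream")

var padded = new BlockStream(16)               // default mode: zero-pads the tail
var raw = new BlockStream(16, { nopad: true }) // nopad mode: emits the tail as-is

padded.on("data", function (c) { console.log("padded chunk:", c.length) })
raw.on("data", function (c) { console.log("nopad chunk:", c.length) })

// the same 17 bytes through each:
padded.end(Buffer.from("abcdefghijklmnopq")) // expect 16, 16 (second chunk zero-filled)
raw.end(Buffer.from("abcdefghijklmnopq"))    // expect 16, 1 ("q" alone, per the big-write test)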
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/test/pause-resume.js b/deps/npm/node_modules/tar/node_modules/block-stream/test/pause-resume.js
deleted file mode 100644
index 64d0d091da..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/test/pause-resume.js
+++ /dev/null
@@ -1,73 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16]
- , writeSizes = [15, 16, 17]
- , writeCounts = [1, 10]//, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize)
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
- var paused = false
-
- f.on("data", function (c) {
- timeouts ++
- t.notOk(paused, "should not be paused when emitting data")
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- paused = true
- f.pause()
- process.nextTick(function () {
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- paused = false
- f.resume()
- timeouts --
- })
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = expectChunks * blockSize
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 200)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
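
pause-resume.js, removed above, is the only one of these tests asserting a flow-control invariant rather than byte totals: under Node's classic pre-streams2 API, pause() was merely advisory, so the test sets a flag before pausing inside the "data" handler and fails if another "data" event fires before resume(). A distilled sketch of the same check, against the published block-stream package:

var BlockStream = require("block-stream")
var stream = new BlockStream(16)

var paused = false
stream.on("data", function (c) {
  if (paused) throw new Error("data emitted while paused")
  paused = true
  stream.pause()
  process.nextTick(function () {
    paused = false
    stream.resume() // emission may legitimately continue only from here
  })
})

stream.end(Buffer.from("a".repeat(64))) // four 16-byte blocks, each re-pausing the stream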
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/test/thorough.js b/deps/npm/node_modules/tar/node_modules/block-stream/test/thorough.js
deleted file mode 100644
index 1cc9ea08a3..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/test/thorough.js
+++ /dev/null
@@ -1,68 +0,0 @@
-var BlockStream = require("../block-stream.js")
-
-var blockSizes = [16]//, 25]//, 1024]
- , writeSizes = [4, 15, 16, 17, 64 ]//, 64, 100]
- , writeCounts = [1, 10]//, 100]
- , tap = require("tap")
-
-writeCounts.forEach(function (writeCount) {
-blockSizes.forEach(function (blockSize) {
-writeSizes.forEach(function (writeSize) {
- tap.test("writeSize=" + writeSize +
- " blockSize="+blockSize +
- " writeCount="+writeCount, function (t) {
- var f = new BlockStream(blockSize)
-
- var actualChunks = 0
- var actualBytes = 0
- var timeouts = 0
-
- f.on("data", function (c) {
- timeouts ++
-
- actualChunks ++
- actualBytes += c.length
-
- // make sure that no data gets corrupted, and basic sanity
- var before = c.toString()
- // simulate a slow write operation
- setTimeout(function () {
- timeouts --
-
- var after = c.toString()
- t.equal(after, before, "should not change data")
-
- // now corrupt it, to find leaks.
- for (var i = 0; i < c.length; i ++) {
- c[i] = "x".charCodeAt(0)
- }
- }, 100)
- })
-
- f.on("end", function () {
- // round up to the nearest block size
- var expectChunks = Math.ceil(writeSize * writeCount * 2 / blockSize)
- var expectBytes = expectChunks * blockSize
- t.equal(actualBytes, expectBytes,
- "bytes=" + expectBytes + " writeSize=" + writeSize)
- t.equal(actualChunks, expectChunks,
- "chunks=" + expectChunks + " writeSize=" + writeSize)
-
- // wait for all the timeout checks to finish, then end the test
- setTimeout(function WAIT () {
- if (timeouts > 0) return setTimeout(WAIT)
- t.end()
- }, 100)
- })
-
- for (var i = 0; i < writeCount; i ++) {
- var a = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) a[j] = "a".charCodeAt(0)
- var b = new Buffer(writeSize);
- for (var j = 0; j < writeSize; j ++) b[j] = "b".charCodeAt(0)
- f.write(a)
- f.write(b)
- }
- f.end()
- })
-}) }) })
diff --git a/deps/npm/node_modules/tar/node_modules/block-stream/test/two-stream.js b/deps/npm/node_modules/tar/node_modules/block-stream/test/two-stream.js
deleted file mode 100644
index c6db79a43d..0000000000
--- a/deps/npm/node_modules/tar/node_modules/block-stream/test/two-stream.js
+++ /dev/null
@@ -1,59 +0,0 @@
-var log = console.log,
- assert = require( 'assert' ),
- BlockStream = require("../block-stream.js"),
- isize = 0, tsize = 0, fsize = 0, psize = 0, i = 0,
- filter = null, paper = null, stack = null,
-
-// a source data buffer
-tsize = 1 * 1024; // <- 1K
-stack = new Buffer( tsize );
-for ( ; i < tsize; i++) stack[i] = "x".charCodeAt(0);
-
-isize = 1 * 1024; // <- initial packet size with 4K no bug!
-fsize = 2 * 1024 ; // <- first block-stream size
-psize = Math.ceil( isize / 6 ); // <- second block-stream size
-
-fexpected = Math.ceil( tsize / fsize ); // <- packets expected for first
-pexpected = Math.ceil( tsize / psize ); // <- packets expected for second
-
-
-filter = new BlockStream( fsize, { nopad : true } );
-paper = new BlockStream( psize, { nopad : true } );
-
-
-var fcounter = 0;
-filter.on( 'data', function (c) {
- // verify that they're not null-padded
- for (var i = 0; i < c.length; i ++) {
- assert.strictEqual(c[i], "x".charCodeAt(0))
- }
- ++fcounter;
-} );
-
-var pcounter = 0;
-paper.on( 'data', function (c) {
- // verify that they're not null-padded
- for (var i = 0; i < c.length; i ++) {
- assert.strictEqual(c[i], "x".charCodeAt(0))
- }
- ++pcounter;
-} );
-
-filter.pipe( paper );
-
-filter.on( 'end', function () {
- log("fcounter: %s === %s", fcounter, fexpected)
- assert.strictEqual( fcounter, fexpected );
-} );
-
-paper.on( 'end', function () {
- log("pcounter: %s === %s", pcounter, pexpected);
- assert.strictEqual( pcounter, pexpected );
-} );
-
-
-for ( i = 0, j = isize; j <= tsize; j += isize ) {
- filter.write( stack.slice( j - isize, j ) );
-}
-
-filter.end();
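
Finally, the removed two-stream.js covers re-blocking: 1 KiB of "x" bytes flows through a 2048-byte nopad stream piped into a 171-byte nopad stream, and each stage must emit its expected packet count with no zero padding leaking through the pipe. Restating the asserted math from the test:

var tsize = 1 * 1024            // total bytes written
var fsize = 2 * 1024            // first stage block size
var psize = Math.ceil(1024 / 6) // second stage block size: 171

// a nopad stream emits ceil(total / blockSize) chunks
console.log(Math.ceil(tsize / fsize)) // 1 chunk out of the first stage
console.log(Math.ceil(tsize / psize)) // 6 chunks out of the second stage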