diff --git a/.travis.yml b/.travis.yml
index 06ccc81530..1b82118460 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,6 +6,10 @@ before_install:
notifications:
email: false
matrix:
+ fast_finish: true
+ allow_failures:
+ - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
+ - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
include:
- node_js: '0.8'
env: TASK=test
@@ -36,9 +40,9 @@ matrix:
- node_js: 5
env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest"
- node_js: 5
- env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..7.1"
+ env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
- node_js: 5
- env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..7.1"
+ env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
- node_js: 5
env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
script: "npm run $TASK"
diff --git a/README.md b/README.md
index ebc105f0a2..b36137654d 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# readable-stream
-***Node-core v5.6.0 streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
+***Node-core v5.7.0 streams for userland*** [](https://travis-ci.org/nodejs/readable-stream)
[](https://nodei.co/npm/readable-stream/)
diff --git a/build/build.js b/build/build.js
index ae763950ef..765bf820b3 100644
--- a/build/build.js
+++ b/build/build.js
@@ -5,6 +5,7 @@ const hyperquest = require('hyperzip')(require('hyperdirect'))
, fs = require('fs')
, path = require('path')
, cheerio = require('cheerio')
+ , babel = require('babel-core')
, encoding = 'utf8'
, urlRegex = /^https?:\/\//
, nodeVersion = process.argv[2]
@@ -46,7 +47,12 @@ function processFile (inputLoc, out, replacements) {
replacements.forEach(function (replacement) {
data = data.replace.apply(data, replacement)
})
-
+ if (inputLoc.slice(-3) === '.js') {
+ const transformed = babel.transform(data, {
+ plugins: ['transform-es2015-arrow-functions', 'transform-es2015-block-scoping']
+ })
+ data = transformed.code
+ }
fs.writeFile(out, data, encoding, function (err) {
if (err) throw err
diff --git a/build/common-replacements.js b/build/common-replacements.js
index 61f3d10875..fe5badf6af 100644
--- a/build/common-replacements.js
+++ b/build/common-replacements.js
@@ -39,8 +39,3 @@ module.exports.objectKeysReplacement = [
/Object\.keys/g
, 'objectKeys'
]
-
-module.exports.constReplacement = [
- /\bconst\b/g
- , 'var'
-]
diff --git a/build/files.js b/build/files.js
index c6f390d235..ed13fe2716 100644
--- a/build/files.js
+++ b/build/files.js
@@ -12,7 +12,6 @@ const headRegexp = /(^module.exports = \w+;?)/m
/(require\(['"])(_stream_)/g
, '$1./$2'
]
-
, instanceofReplacement = [
/instanceof Stream\.(\w+)/g
, function (match, streamType) {
@@ -44,15 +43,15 @@ const headRegexp = /(^module.exports = \w+;?)/m
, altIndexOfUseReplacement = require('./common-replacements').altIndexOfUseReplacement
, utilReplacement = [
- /^var util = require\('util'\);/m
- , '\n/**/\nvar util = require(\'core-util-is\');\n'
+ /^const util = require\('util'\);/m
+ , '\n/**/\nconst util = require(\'core-util-is\');\n'
+ 'util.inherits = require(\'inherits\');\n/**/\n'
]
, debugLogReplacement = [
- /var debug = util.debuglog\('stream'\);/
- , '\n\n/**/\nvar debugUtil = require(\'util\');\n'
- + 'var debug;\n'
+ /const debug = util.debuglog\('stream'\);/
+ , '\n\n/**/\nconst debugUtil = require(\'util\');\n'
+ + 'let debug;\n'
+ 'if (debugUtil && debugUtil.debuglog) {\n'
+ ' debug = debugUtil.debuglog(\'stream\');\n'
+ '} else {\n'
@@ -103,8 +102,6 @@ const headRegexp = /(^module.exports = \w+;?)/m
, 'EElistenerCount'
]
- , constReplacement = require('./common-replacements').constReplacement
-
, bufferIsEncodingReplacement = [
/Buffer.isEncoding\((\w+)\)/
, '([\'hex\', \'utf8\', \'utf-8\', \'ascii\', \'binary\', \'base64\',\n'
@@ -113,7 +110,7 @@ const headRegexp = /(^module.exports = \w+;?)/m
]
, requireStreamReplacement = [
- /var Stream = require\('stream'\);/
+ /const Stream = require\('stream'\);/
, '\n\n/**/\n'
+ 'var Stream;\n(function (){try{\n'
+ ' Stream = require(\'st\' + \'ream\');\n'
@@ -140,18 +137,13 @@ const headRegexp = /(^module.exports = \w+;?)/m
]
, internalUtilReplacement = [
- /^var internalUtil = require\('internal\/util'\);/m
- , '\n/**/\nvar internalUtil = {\n deprecate: require(\'util-deprecate\')\n};\n'
+ /^const internalUtil = require\('internal\/util'\);/m
+ , '\n/**/\nconst internalUtil = {\n deprecate: require(\'util-deprecate\')\n};\n'
+ '/**/\n'
]
- ,
- letReplacements = [
- /\blet\b/g
- , 'var'
- ]
+
module.exports['_stream_duplex.js'] = [
- constReplacement
- , requireReplacement
+ requireReplacement
, instanceofReplacement
, utilReplacement
, stringDecoderReplacement
@@ -164,16 +156,14 @@ module.exports['_stream_duplex.js'] = [
]
module.exports['_stream_passthrough.js'] = [
- constReplacement
- , requireReplacement
+ requireReplacement
, instanceofReplacement
, utilReplacement
, stringDecoderReplacement
]
module.exports['_stream_readable.js'] = [
- constReplacement
- , addDuplexRequire
+ addDuplexRequire
, requireReplacement
, instanceofReplacement
, bufferReplacement
@@ -194,20 +184,17 @@ module.exports['_stream_readable.js'] = [
, processNextTickImport
, processNextTickReplacement
, eventEmittterListenerCountReplacement
- , letReplacements
]
module.exports['_stream_transform.js'] = [
- constReplacement
- , requireReplacement
+ requireReplacement
, instanceofReplacement
, utilReplacement
, stringDecoderReplacement
]
module.exports['_stream_writable.js'] = [
- constReplacement
- , addDuplexRequire
+ addDuplexRequire
, requireReplacement
, instanceofReplacement
, bufferReplacement
@@ -224,4 +211,5 @@ module.exports['_stream_writable.js'] = [
, processNextTickImport
, processNextTickReplacement
, internalUtilReplacement
+
]
diff --git a/build/package.json b/build/package.json
index 7da9727fe4..d2c9344a03 100644
--- a/build/package.json
+++ b/build/package.json
@@ -4,9 +4,12 @@
"description": "",
"main": "build.js",
"dependencies": {
+ "babel-core": "^6.5.2",
+ "babel-plugin-transform-es2015-arrow-functions": "^6.5.2",
+ "babel-plugin-transform-es2015-block-scoping": "^6.5.0",
"bl": "~0.6.0",
- "hyperzip": "0.0.0",
+ "cheerio": "~0.13.1",
"hyperdirect": "0.0.0",
- "cheerio": "~0.13.1"
+ "hyperzip": "0.0.0"
}
}
diff --git a/build/test-replacements.js b/build/test-replacements.js
index a1ab365990..0c6d052a79 100644
--- a/build/test-replacements.js
+++ b/build/test-replacements.js
@@ -6,8 +6,6 @@ const altForEachImplReplacement = require('./common-replacements').altForEachImp
require('./common-replacements').objectKeysDefine
, objectKeysReplacement =
require('./common-replacements').objectKeysReplacement
- , constReplacement =
- require('./common-replacements').constReplacement
module.exports.all = [
[
@@ -32,7 +30,6 @@ module.exports.all = [
/Stream.(Readable|Writable|Duplex|Transform|PassThrough)/g
, 'require(\'../../\').$1'
]
- , constReplacement
]
@@ -66,7 +63,6 @@ module.exports['common.js'] = [
, objectKeysReplacement
, altForEachImplReplacement
, altForEachUseReplacement
- , constReplacement
, [
/(exports.mustCall[\s\S]*)/m
@@ -147,21 +143,16 @@ module.exports['common.js'] = [
/require\(['"]stream['"]\)/g
, 'require(\'../\')'
],
- [/forEach\(data, line => \{\n\s+this\.emit\('data', line \+ '\\n'\);\n\s+\}\);/m,
- `var self = this;
- forEach(data, function(line) {
- self.emit('data', line + '\\n');
- });`
- ],
- [
- /(varructor,)/,
- '// $1'
- ],
[
/^var util = require\('util'\);/m
, '\n/**/\nvar util = require(\'core-util-is\');\n'
+ 'util.inherits = require(\'inherits\');\n/**/\n'
- ]
+ ],
+ [
+ /^const util = require\('util'\);/m
+, '\n/**/\nvar util = require(\'core-util-is\');\n'
+ + 'util.inherits = require(\'inherits\');\n/**/\n'
+]
]
// this test has some trouble with the nextTick depth when run
@@ -191,7 +182,12 @@ module.exports['test-stream2-large-read-stall.js'] = [
module.exports['test-stream-pipe-cleanup.js'] = [
[
/(function Writable\(\) \{)/
- , 'if (/^v0\\.8\\./.test(process.version))\n return\n\n$1'
+ , '(function (){\nif (/^v0\\.8\\./.test(process.version))\n return\n\n$1'
+ ]
+ ,
+ [
+ /$/
+ ,'}())'
]
]
@@ -218,25 +214,31 @@ module.exports['test-stream-pipe-without-listenerCount.js'] = [
[
/require\(\'stream\'\)/g,
'stream'
- ],
- [
- /const /g,
- 'var '
]
]
-module.exports['test-stream-pipe-cleanup-pause.js'] = [
+module.exports['test-stream2-readable-empty-buffer-no-eof.js'] = [
[
- /const /g,
- 'var '
+ `const buf = Buffer(5).fill('x');`,
+ `const buf = new Buffer(5);
+ buf.fill('x');`
]
]
-module.exports['test-stream2-readable-empty-buffer-no-eof.js'] = [[
- /let /g,
- 'var '],
+
+module.exports['test-stream2-unpipe-drain.js'] = [
[
- `var buf = Buffer(5).fill('x');`,
- `var buf = new Buffer(5);
- buf.fill('x');`
+ /^/,
+ `(function () {\n`
+ ],
+ [
+ /$/
+ ,'}())'
]
]
+
+module.exports['test-stream2-decode-partial.js'] = [
+ [
+ /readable\.push\(source\.slice\(4, 6\)\)/
+ ,`readable.push(source.slice(4, source.length));`
+ ]
+]
diff --git a/doc/stream.markdown b/doc/stream.markdown
index 33f9c2602e..67a745300c 100644
--- a/doc/stream.markdown
+++ b/doc/stream.markdown
@@ -164,7 +164,7 @@ Not all streams will emit the `'close'` event.
#### Event: 'data'
-* `chunk` {Buffer | String} The chunk of data.
+* `chunk` {Buffer|String} The chunk of data.
Attaching a `'data'` event listener to a stream that has not been
explicitly paused will switch the stream into flowing mode. Data will
@@ -254,7 +254,7 @@ end
#### readable.isPaused()
-* Return: `Boolean`
+* Return: {Boolean}
This method returns whether or not the `readable` has been **explicitly**
paused by client code (using [`stream.pause()`][stream-pause] without a
@@ -293,7 +293,7 @@ readable.on('data', (chunk) => {
#### readable.pipe(destination[, options])
-* `destination` {[Writable][] Stream} The destination for writing data
+* `destination` {stream.Writable} The destination for writing data
* `options` {Object} Pipe options
* `end` {Boolean} End the writer when the reader ends. Default = `true`
@@ -346,7 +346,7 @@ the process exits, regardless of the specified options.
#### readable.read([size])
* `size` {Number} Optional argument to specify how much data to read.
-* Return {String | Buffer | null}
+* Return {String|Buffer|Null}
The `read()` method pulls some data out of the internal buffer and
returns it. If there is no data available, then it will return
@@ -427,7 +427,7 @@ readable.on('data', (chunk) => {
#### readable.unpipe([destination])
-* `destination` {[Writable][] Stream} Optional specific stream to unpipe
+* `destination` {stream.Writable} Optional specific stream to unpipe
This method will remove the hooks set up for a previous [`stream.pipe()`][]
call.
@@ -453,7 +453,7 @@ setTimeout(() => {
#### readable.unshift(chunk)
-* `chunk` {Buffer | String} Chunk of data to unshift onto the read queue
+* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue
This is useful in certain cases where a stream is being consumed by a
parser, which needs to "un-consume" some data that it has
@@ -605,7 +605,7 @@ function writeOneMillionTimes(writer, data, encoding, callback) {
#### Event: 'error'
-* {Error object}
+* {Error}
Emitted if there was an error when writing or piping data.
@@ -627,7 +627,7 @@ writer.on('finish', () => {
#### Event: 'pipe'
-* `src` {[Readable][] Stream} source stream that is piping to this writable
+* `src` {stream.Readable} source stream that is piping to this writable
This is emitted whenever the [`stream.pipe()`][] method is called on a readable
stream, adding this writable to its set of destinations.
@@ -670,7 +670,7 @@ Buffered data will be flushed either at [`stream.uncork()`][] or at
#### writable.end([chunk][, encoding][, callback])
-* `chunk` {String | Buffer} Optional data to write
+* `chunk` {String|Buffer} Optional data to write
* `encoding` {String} The encoding, if `chunk` is a String
* `callback` {Function} Optional callback for when the stream is finished
@@ -700,7 +700,7 @@ Flush all data, buffered since [`stream.cork()`][] call.
#### writable.write(chunk[, encoding][, callback])
-* `chunk` {String | Buffer} The data to write
+* `chunk` {String|Buffer} The data to write
* `encoding` {String} The encoding, if `chunk` is a String
* `callback` {Function} Callback for when this chunk of data is flushed
* Returns: {Boolean} `true` if the data was handled completely.
@@ -901,8 +901,9 @@ becomes available. There is no need, for example to "wait" until
#### readable.push(chunk[, encoding])
-* `chunk` {Buffer | null | String} Chunk of data to push into the read queue
-* `encoding` {String} Encoding of String chunks. Must be a valid
+
+* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
+* `encoding` {String} Encoding of String chunks. Must be a valid
Buffer encoding, such as `'utf8'` or `'ascii'`
* return {Boolean} Whether or not more pushes should be performed
@@ -1167,7 +1168,7 @@ your own extension classes.
#### transform.\_transform(chunk, encoding, callback)
-* `chunk` {Buffer | String} The chunk to be transformed. Will **always**
+* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
be a buffer unless the `decodeStrings` option was set to `false`.
* `encoding` {String} If the chunk is a string, then this is the
encoding type. If chunk is a buffer, then this is the special
@@ -1328,7 +1329,7 @@ initialized.
#### writable.\_write(chunk, encoding, callback)
-* `chunk` {Buffer | String} The chunk to be written. Will **always**
+* `chunk` {Buffer|String} The chunk to be written. Will **always**
be a buffer unless the `decodeStrings` option was set to `false`.
* `encoding` {String} If the chunk is a string, then this is the
encoding type. If chunk is a buffer, then this is the special
@@ -1711,30 +1712,30 @@ horribly wrong.
[`'end'`]: #stream_event_end
[`'finish'`]: #stream_event_finish
[`'readable'`]: #stream_event_readable
-[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.6.0/api/buffer.html#buffer_buf_tostring_encoding_start_end
-[`EventEmitter`]: https://nodejs.org/docs/v5.6.0/api/events.html#events_class_eventemitter
-[`process.stderr`]: https://nodejs.org/docs/v5.6.0/api/process.html#process_process_stderr
-[`process.stdin`]: https://nodejs.org/docs/v5.6.0/api/process.html#process_process_stdin
-[`process.stdout`]: https://nodejs.org/docs/v5.6.0/api/process.html#process_process_stdout
+[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.7.0/api/buffer.html#buffer_buf_tostring_encoding_start_end
+[`EventEmitter`]: https://nodejs.org/docs/v5.7.0/api/events.html#events_class_eventemitter
+[`process.stderr`]: https://nodejs.org/docs/v5.7.0/api/process.html#process_process_stderr
+[`process.stdin`]: https://nodejs.org/docs/v5.7.0/api/process.html#process_process_stdin
+[`process.stdout`]: https://nodejs.org/docs/v5.7.0/api/process.html#process_process_stdout
[`stream.cork()`]: #stream_writable_cork
[`stream.pipe()`]: #stream_readable_pipe_destination_options
[`stream.uncork()`]: #stream_writable_uncork
[`stream.unpipe()`]: #stream_readable_unpipe_destination
[`stream.wrap()`]: #stream_readable_wrap_stream
-[`tls.CryptoStream`]: https://nodejs.org/docs/v5.6.0/api/tls.html#tls_class_cryptostream
-[`util.inherits()`]: https://nodejs.org/docs/v5.6.0/api/util.html#util_util_inherits_constructor_superconstructor
+[`tls.CryptoStream`]: https://nodejs.org/docs/v5.7.0/api/tls.html#tls_class_cryptostream
+[`util.inherits()`]: https://nodejs.org/docs/v5.7.0/api/util.html#util_util_inherits_constructor_superconstructor
[API for Stream Consumers]: #stream_api_for_stream_consumers
[API for Stream Implementors]: #stream_api_for_stream_implementors
-[child process stdin]: https://nodejs.org/docs/v5.6.0/api/child_process.html#child_process_child_stdin
-[child process stdout and stderr]: https://nodejs.org/docs/v5.6.0/api/child_process.html#child_process_child_stdout
+[child process stdin]: https://nodejs.org/docs/v5.7.0/api/child_process.html#child_process_child_stdin
+[child process stdout and stderr]: https://nodejs.org/docs/v5.7.0/api/child_process.html#child_process_child_stdout
[Compatibility]: #stream_compatibility_with_older_node_js_versions
[crypto]: crypto.html
[Duplex]: #stream_class_stream_duplex
-[fs read streams]: https://nodejs.org/docs/v5.6.0/api/fs.html#fs_class_fs_readstream
-[fs write streams]: https://nodejs.org/docs/v5.6.0/api/fs.html#fs_class_fs_writestream
-[HTTP requests, on the client]: https://nodejs.org/docs/v5.6.0/api/http.html#http_class_http_clientrequest
-[HTTP responses, on the server]: https://nodejs.org/docs/v5.6.0/api/http.html#http_class_http_serverresponse
-[http-incoming-message]: https://nodejs.org/docs/v5.6.0/api/http.html#http_class_http_incomingmessage
+[fs read streams]: https://nodejs.org/docs/v5.7.0/api/fs.html#fs_class_fs_readstream
+[fs write streams]: https://nodejs.org/docs/v5.7.0/api/fs.html#fs_class_fs_writestream
+[HTTP requests, on the client]: https://nodejs.org/docs/v5.7.0/api/http.html#http_class_http_clientrequest
+[HTTP responses, on the server]: https://nodejs.org/docs/v5.7.0/api/http.html#http_class_http_serverresponse
+[http-incoming-message]: https://nodejs.org/docs/v5.7.0/api/http.html#http_class_http_incomingmessage
[Object mode]: #stream_object_mode
[Readable]: #stream_class_stream_readable
[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2
@@ -1749,7 +1750,7 @@ horribly wrong.
[stream-read]: #stream_readable_read_size
[stream-resume]: #stream_readable_resume
[stream-write]: #stream_writable_write_chunk_encoding_callback
-[TCP sockets]: https://nodejs.org/docs/v5.6.0/api/net.html#net_class_net_socket
+[TCP sockets]: https://nodejs.org/docs/v5.7.0/api/net.html#net_class_net_socket
[Transform]: #stream_class_stream_transform
[Writable]: #stream_class_stream_writable
[zlib]: zlib.html
diff --git a/lib/_stream_duplex.js b/lib/_stream_duplex.js
index 69558af037..736693b840 100644
--- a/lib/_stream_duplex.js
+++ b/lib/_stream_duplex.js
@@ -6,22 +6,21 @@
'use strict';
/**/
+
var objectKeys = Object.keys || function (obj) {
var keys = [];
- for (var key in obj) keys.push(key);
- return keys;
-}
+ for (var key in obj) {
+ keys.push(key);
+ }return keys;
+};
/**/
-
module.exports = Duplex;
/**/
var processNextTick = require('process-nextick-args');
/**/
-
-
/**/
var util = require('core-util-is');
util.inherits = require('inherits');
@@ -35,26 +34,21 @@ util.inherits(Duplex, Readable);
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
var method = keys[v];
- if (!Duplex.prototype[method])
- Duplex.prototype[method] = Writable.prototype[method];
+ if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}
function Duplex(options) {
- if (!(this instanceof Duplex))
- return new Duplex(options);
+ if (!(this instanceof Duplex)) return new Duplex(options);
Readable.call(this, options);
Writable.call(this, options);
- if (options && options.readable === false)
- this.readable = false;
+ if (options && options.readable === false) this.readable = false;
- if (options && options.writable === false)
- this.writable = false;
+ if (options && options.writable === false) this.writable = false;
this.allowHalfOpen = true;
- if (options && options.allowHalfOpen === false)
- this.allowHalfOpen = false;
+ if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
this.once('end', onend);
}
@@ -63,8 +57,7 @@ function Duplex(options) {
function onend() {
// if we allow half-open state, or if the writable side ended,
// then we're ok.
- if (this.allowHalfOpen || this._writableState.ended)
- return;
+ if (this.allowHalfOpen || this._writableState.ended) return;
// no more data can be written.
// But allow more writes to happen in this tick.
@@ -75,8 +68,8 @@ function onEndNT(self) {
self.end();
}
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
-}
+}
\ No newline at end of file
diff --git a/lib/_stream_passthrough.js b/lib/_stream_passthrough.js
index bddfdd0153..d06f71f186 100644
--- a/lib/_stream_passthrough.js
+++ b/lib/_stream_passthrough.js
@@ -16,12 +16,11 @@ util.inherits = require('inherits');
util.inherits(PassThrough, Transform);
function PassThrough(options) {
- if (!(this instanceof PassThrough))
- return new PassThrough(options);
+ if (!(this instanceof PassThrough)) return new PassThrough(options);
Transform.call(this, options);
}
-PassThrough.prototype._transform = function(chunk, encoding, cb) {
+PassThrough.prototype._transform = function (chunk, encoding, cb) {
cb(null, chunk);
-};
+};
\ No newline at end of file
diff --git a/lib/_stream_readable.js b/lib/_stream_readable.js
index 4144427d69..54a9d5c553 100644
--- a/lib/_stream_readable.js
+++ b/lib/_stream_readable.js
@@ -6,12 +6,10 @@ module.exports = Readable;
var processNextTick = require('process-nextick-args');
/**/
-
/**/
var isArray = require('isarray');
/**/
-
/**/
var Buffer = require('buffer').Buffer;
/**/
@@ -21,21 +19,20 @@ Readable.ReadableState = ReadableState;
var EE = require('events');
/**/
-var EElistenerCount = function(emitter, type) {
+var EElistenerCount = function (emitter, type) {
return emitter.listeners(type).length;
};
/**/
-
-
/**/
var Stream;
-(function (){try{
- Stream = require('st' + 'ream');
-}catch(_){}finally{
- if (!Stream)
- Stream = require('events').EventEmitter;
-}}())
+(function () {
+ try {
+ Stream = require('st' + 'ream');
+ } catch (_) {} finally {
+ if (!Stream) Stream = require('events').EventEmitter;
+ }
+})();
/**/
var Buffer = require('buffer').Buffer;
@@ -45,11 +42,9 @@ var util = require('core-util-is');
util.inherits = require('inherits');
/**/
-
-
/**/
var debugUtil = require('util');
-var debug;
+var debug = undefined;
if (debugUtil && debugUtil.debuglog) {
debug = debugUtil.debuglog('stream');
} else {
@@ -71,17 +66,16 @@ function ReadableState(options, stream) {
// make all the buffer merging and length checks go away
this.objectMode = !!options.objectMode;
- if (stream instanceof Duplex)
- this.objectMode = this.objectMode || !!options.readableObjectMode;
+ if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
// the point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+ this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~~this.highWaterMark;
+ this.highWaterMark = ~ ~this.highWaterMark;
this.buffer = [];
this.length = 0;
@@ -123,8 +117,7 @@ function ReadableState(options, stream) {
this.decoder = null;
this.encoding = null;
if (options.encoding) {
- if (!StringDecoder)
- StringDecoder = require('string_decoder/').StringDecoder;
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this.decoder = new StringDecoder(options.encoding);
this.encoding = options.encoding;
}
@@ -134,16 +127,14 @@ var Duplex;
function Readable(options) {
Duplex = Duplex || require('./_stream_duplex');
- if (!(this instanceof Readable))
- return new Readable(options);
+ if (!(this instanceof Readable)) return new Readable(options);
this._readableState = new ReadableState(options, this);
// legacy
this.readable = true;
- if (options && typeof options.read === 'function')
- this._read = options.read;
+ if (options && typeof options.read === 'function') this._read = options.read;
Stream.call(this);
}
@@ -152,7 +143,7 @@ function Readable(options) {
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
-Readable.prototype.push = function(chunk, encoding) {
+Readable.prototype.push = function (chunk, encoding) {
var state = this._readableState;
if (!state.objectMode && typeof chunk === 'string') {
@@ -167,12 +158,12 @@ Readable.prototype.push = function(chunk, encoding) {
};
// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function(chunk) {
+Readable.prototype.unshift = function (chunk) {
var state = this._readableState;
return readableAddChunk(this, state, chunk, '', true);
};
-Readable.prototype.isPaused = function() {
+Readable.prototype.isPaused = function () {
return this._readableState.flowing === false;
};
@@ -191,26 +182,28 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var e = new Error('stream.unshift() after end event');
stream.emit('error', e);
} else {
- if (state.decoder && !addToFront && !encoding)
+ var skipAdd;
+ if (state.decoder && !addToFront && !encoding) {
chunk = state.decoder.write(chunk);
+ skipAdd = !state.objectMode && chunk.length === 0;
+ }
- if (!addToFront)
- state.reading = false;
-
- // if we want the data now, just emit it.
- if (state.flowing && state.length === 0 && !state.sync) {
- stream.emit('data', chunk);
- stream.read(0);
- } else {
- // update the buffer info.
- state.length += state.objectMode ? 1 : chunk.length;
- if (addToFront)
- state.buffer.unshift(chunk);
- else
- state.buffer.push(chunk);
-
- if (state.needReadable)
- emitReadable(stream);
+ if (!addToFront) state.reading = false;
+
+ // Don't add to the buffer if we've decoded to an empty string chunk and
+ // we're not in object mode
+ if (!skipAdd) {
+ // if we want the data now, just emit it.
+ if (state.flowing && state.length === 0 && !state.sync) {
+ stream.emit('data', chunk);
+ stream.read(0);
+ } else {
+ // update the buffer info.
+ state.length += state.objectMode ? 1 : chunk.length;
+ if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
+
+ if (state.needReadable) emitReadable(stream);
+ }
}
maybeReadMore(stream, state);
@@ -222,7 +215,6 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
return needMoreData(state);
}
-
// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
@@ -231,16 +223,12 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
- return !state.ended &&
- (state.needReadable ||
- state.length < state.highWaterMark ||
- state.length === 0);
+ return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
}
// backwards compatibility.
-Readable.prototype.setEncoding = function(enc) {
- if (!StringDecoder)
- StringDecoder = require('string_decoder/').StringDecoder;
+Readable.prototype.setEncoding = function (enc) {
+ if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
this._readableState.decoder = new StringDecoder(enc);
this._readableState.encoding = enc;
return this;
@@ -265,29 +253,22 @@ function computeNewHighWaterMark(n) {
}
function howMuchToRead(n, state) {
- if (state.length === 0 && state.ended)
- return 0;
+ if (state.length === 0 && state.ended) return 0;
- if (state.objectMode)
- return n === 0 ? 0 : 1;
+ if (state.objectMode) return n === 0 ? 0 : 1;
if (n === null || isNaN(n)) {
// only flow one buffer at a time
- if (state.flowing && state.buffer.length)
- return state.buffer[0].length;
- else
- return state.length;
+ if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length;
}
- if (n <= 0)
- return 0;
+ if (n <= 0) return 0;
// If we're asking for more than the target buffer level,
// then raise the water mark. Bump up to the next highest
// power of 2, to prevent increasing it excessively in tiny
// amounts.
- if (n > state.highWaterMark)
- state.highWaterMark = computeNewHighWaterMark(n);
+ if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
// don't have that much. return null, unless we've ended.
if (n > state.length) {
@@ -303,25 +284,19 @@ function howMuchToRead(n, state) {
}
// you can override either this method, or the async _read(n) below.
-Readable.prototype.read = function(n) {
+Readable.prototype.read = function (n) {
debug('read', n);
var state = this._readableState;
var nOrig = n;
- if (typeof n !== 'number' || n > 0)
- state.emittedReadable = false;
+ if (typeof n !== 'number' || n > 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
// the 'readable' event and move on.
- if (n === 0 &&
- state.needReadable &&
- (state.length >= state.highWaterMark || state.ended)) {
+ if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
debug('read: emitReadable', state.length, state.ended);
- if (state.length === 0 && state.ended)
- endReadable(this);
- else
- emitReadable(this);
+ if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
return null;
}
@@ -329,8 +304,7 @@ Readable.prototype.read = function(n) {
// if we've ended, and we're now clear, then finish it up.
if (n === 0 && state.ended) {
- if (state.length === 0)
- endReadable(this);
+ if (state.length === 0) endReadable(this);
return null;
}
@@ -378,8 +352,7 @@ Readable.prototype.read = function(n) {
state.reading = true;
state.sync = true;
// if the length is currently zero, then we *need* a readable event.
- if (state.length === 0)
- state.needReadable = true;
+ if (state.length === 0) state.needReadable = true;
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
@@ -387,14 +360,10 @@ Readable.prototype.read = function(n) {
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
- if (doRead && !state.reading)
- n = howMuchToRead(nOrig, state);
+ if (doRead && !state.reading) n = howMuchToRead(nOrig, state);
var ret;
- if (n > 0)
- ret = fromList(n, state);
- else
- ret = null;
+ if (n > 0) ret = fromList(n, state);else ret = null;
if (ret === null) {
state.needReadable = true;
@@ -405,32 +374,24 @@ Readable.prototype.read = function(n) {
// If we have nothing in the buffer, then we want to know
// as soon as we *do* get something into the buffer.
- if (state.length === 0 && !state.ended)
- state.needReadable = true;
+ if (state.length === 0 && !state.ended) state.needReadable = true;
// If we tried to read() past the EOF, then emit end on the next tick.
- if (nOrig !== n && state.ended && state.length === 0)
- endReadable(this);
+ if (nOrig !== n && state.ended && state.length === 0) endReadable(this);
- if (ret !== null)
- this.emit('data', ret);
+ if (ret !== null) this.emit('data', ret);
return ret;
};
function chunkInvalid(state, chunk) {
var er = null;
- if (!(Buffer.isBuffer(chunk)) &&
- typeof chunk !== 'string' &&
- chunk !== null &&
- chunk !== undefined &&
- !state.objectMode) {
+ if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
er = new TypeError('Invalid non-string/buffer chunk');
}
return er;
}
-
function onEofChunk(stream, state) {
if (state.ended) return;
if (state.decoder) {
@@ -455,10 +416,7 @@ function emitReadable(stream) {
if (!state.emittedReadable) {
debug('emitReadable', state.flowing);
state.emittedReadable = true;
- if (state.sync)
- processNextTick(emitReadable_, stream);
- else
- emitReadable_(stream);
+ if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream);
}
}
@@ -468,7 +426,6 @@ function emitReadable_(stream) {
flow(stream);
}
-
// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
@@ -484,15 +441,12 @@ function maybeReadMore(stream, state) {
function maybeReadMore_(stream, state) {
var len = state.length;
- while (!state.reading && !state.flowing && !state.ended &&
- state.length < state.highWaterMark) {
+ while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
debug('maybeReadMore read 0');
stream.read(0);
if (len === state.length)
// didn't get any data, stop spinning.
- break;
- else
- len = state.length;
+ break;else len = state.length;
}
state.readingMore = false;
}
@@ -501,11 +455,11 @@ function maybeReadMore_(stream, state) {
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
-Readable.prototype._read = function(n) {
+Readable.prototype._read = function (n) {
this.emit('error', new Error('not implemented'));
};
-Readable.prototype.pipe = function(dest, pipeOpts) {
+Readable.prototype.pipe = function (dest, pipeOpts) {
var src = this;
var state = this._readableState;
@@ -523,15 +477,10 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
state.pipesCount += 1;
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
- var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
- dest !== process.stdout &&
- dest !== process.stderr;
+ var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
var endFn = doEnd ? onend : cleanup;
- if (state.endEmitted)
- processNextTick(endFn);
- else
- src.once('end', endFn);
+ if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn);
dest.on('unpipe', onunpipe);
function onunpipe(readable) {
@@ -573,9 +522,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
// flowing again.
// So, if this is awaiting a drain, then we just call it now.
// If we don't know, then assume that we are waiting for one.
- if (state.awaitDrain &&
- (!dest._writableState || dest._writableState.needDrain))
- ondrain();
+ if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
src.on('data', ondata);
@@ -586,10 +533,7 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
// If the user unpiped during `dest.write()`, it is possible
// to get stuck in a permanently paused state if that write
// also returned false.
- if (state.pipesCount === 1 &&
- state.pipes[0] === dest &&
- src.listenerCount('data') === 1 &&
- !cleanedUp) {
+ if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) {
debug('false write response, pause', src._readableState.awaitDrain);
src._readableState.awaitDrain++;
}
@@ -603,18 +547,11 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
debug('onerror', er);
unpipe();
dest.removeListener('error', onerror);
- if (EElistenerCount(dest, 'error') === 0)
- dest.emit('error', er);
+ if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
// This is a brutally ugly hack to make sure that our error handler
// is attached before any userland ones. NEVER DO THIS.
- if (!dest._events || !dest._events.error)
- dest.on('error', onerror);
- else if (isArray(dest._events.error))
- dest._events.error.unshift(onerror);
- else
- dest._events.error = [onerror, dest._events.error];
-
+ if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error];
// Both close and finish should trigger unpipe, but only once.
function onclose() {
@@ -647,11 +584,10 @@ Readable.prototype.pipe = function(dest, pipeOpts) {
};
function pipeOnDrain(src) {
- return function() {
+ return function () {
var state = src._readableState;
debug('pipeOnDrain', state.awaitDrain);
- if (state.awaitDrain)
- state.awaitDrain--;
+ if (state.awaitDrain) state.awaitDrain--;
if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
state.flowing = true;
flow(src);
@@ -659,29 +595,24 @@ function pipeOnDrain(src) {
};
}
-
-Readable.prototype.unpipe = function(dest) {
+Readable.prototype.unpipe = function (dest) {
var state = this._readableState;
// if we're not piping anywhere, then do nothing.
- if (state.pipesCount === 0)
- return this;
+ if (state.pipesCount === 0) return this;
// just one destination. most common case.
if (state.pipesCount === 1) {
// passed in one, but it's not the right one.
- if (dest && dest !== state.pipes)
- return this;
+ if (dest && dest !== state.pipes) return this;
- if (!dest)
- dest = state.pipes;
+ if (!dest) dest = state.pipes;
// got a match.
state.pipes = null;
state.pipesCount = 0;
state.flowing = false;
- if (dest)
- dest.emit('unpipe', this);
+ if (dest) dest.emit('unpipe', this);
return this;
}
@@ -695,20 +626,18 @@ Readable.prototype.unpipe = function(dest) {
state.pipesCount = 0;
state.flowing = false;
- for (var i = 0; i < len; i++)
- dests[i].emit('unpipe', this);
- return this;
+ for (var _i = 0; _i < len; _i++) {
+ dests[_i].emit('unpipe', this);
+ }return this;
}
// try to find the right one.
var i = indexOf(state.pipes, dest);
- if (i === -1)
- return this;
+ if (i === -1) return this;
state.pipes.splice(i, 1);
state.pipesCount -= 1;
- if (state.pipesCount === 1)
- state.pipes = state.pipes[0];
+ if (state.pipesCount === 1) state.pipes = state.pipes[0];
dest.emit('unpipe', this);
@@ -717,7 +646,7 @@ Readable.prototype.unpipe = function(dest) {
// set up data events if they are asked for
// Ensure readable listeners eventually get something
-Readable.prototype.on = function(ev, fn) {
+Readable.prototype.on = function (ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
// If listening to data, and it has not explicitly been paused,
@@ -751,7 +680,7 @@ function nReadingNextTick(self) {
// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function() {
+Readable.prototype.resume = function () {
var state = this._readableState;
if (!state.flowing) {
debug('resume');
@@ -777,11 +706,10 @@ function resume_(stream, state) {
state.resumeScheduled = false;
stream.emit('resume');
flow(stream);
- if (state.flowing && !state.reading)
- stream.read(0);
+ if (state.flowing && !state.reading) stream.read(0);
}
-Readable.prototype.pause = function() {
+Readable.prototype.pause = function () {
debug('call pause flowing=%j', this._readableState.flowing);
if (false !== this._readableState.flowing) {
debug('pause');
@@ -804,32 +732,27 @@ function flow(stream) {
// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function(stream) {
+Readable.prototype.wrap = function (stream) {
var state = this._readableState;
var paused = false;
var self = this;
- stream.on('end', function() {
+ stream.on('end', function () {
debug('wrapped end');
if (state.decoder && !state.ended) {
var chunk = state.decoder.end();
- if (chunk && chunk.length)
- self.push(chunk);
+ if (chunk && chunk.length) self.push(chunk);
}
self.push(null);
});
- stream.on('data', function(chunk) {
+ stream.on('data', function (chunk) {
debug('wrapped data');
- if (state.decoder)
- chunk = state.decoder.write(chunk);
+ if (state.decoder) chunk = state.decoder.write(chunk);
// don't skip over falsy values in objectMode
- if (state.objectMode && (chunk === null || chunk === undefined))
- return;
- else if (!state.objectMode && (!chunk || !chunk.length))
- return;
+ if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
var ret = self.push(chunk);
if (!ret) {
@@ -842,21 +765,23 @@ Readable.prototype.wrap = function(stream) {
// important when wrapping filters and duplexes.
for (var i in stream) {
if (this[i] === undefined && typeof stream[i] === 'function') {
- this[i] = function(method) { return function() {
- return stream[method].apply(stream, arguments);
- }; }(i);
+ this[i] = function (method) {
+ return function () {
+ return stream[method].apply(stream, arguments);
+ };
+ }(i);
}
}
// proxy certain important events.
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
- forEach(events, function(ev) {
+ forEach(events, function (ev) {
stream.on(ev, self.emit.bind(self, ev));
});
// when we try to consume some more bytes, simply unpause the
// underlying stream.
- self._read = function(n) {
+ self._read = function (n) {
debug('wrapped _read', n);
if (paused) {
paused = false;
@@ -867,7 +792,6 @@ Readable.prototype.wrap = function(stream) {
return self;
};
-
// exposed for testing purposes only.
Readable._fromList = fromList;
@@ -881,21 +805,11 @@ function fromList(n, state) {
var ret;
// nothing in the list, definitely empty.
- if (list.length === 0)
- return null;
+ if (list.length === 0) return null;
- if (length === 0)
- ret = null;
- else if (objectMode)
- ret = list.shift();
- else if (!n || n >= length) {
+ if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) {
// read it all, truncate the array.
- if (stringMode)
- ret = list.join('');
- else if (list.length === 1)
- ret = list[0];
- else
- ret = Buffer.concat(list, length);
+ if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length);
list.length = 0;
} else {
// read just some of it.
@@ -911,25 +825,16 @@ function fromList(n, state) {
} else {
// complex case.
// we have enough to cover it, but it spans past the first buffer.
- if (stringMode)
- ret = '';
- else
- ret = new Buffer(n);
+ if (stringMode) ret = '';else ret = new Buffer(n);
var c = 0;
for (var i = 0, l = list.length; i < l && c < n; i++) {
var buf = list[0];
var cpy = Math.min(n - c, buf.length);
- if (stringMode)
- ret += buf.slice(0, cpy);
- else
- buf.copy(ret, c, 0, cpy);
+ if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy);
- if (cpy < buf.length)
- list[0] = buf.slice(cpy);
- else
- list.shift();
+ if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift();
c += cpy;
}
@@ -944,8 +849,7 @@ function endReadable(stream) {
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
- if (state.length > 0)
- throw new Error('endReadable called on non-empty stream');
+ if (state.length > 0) throw new Error('endReadable called on non-empty stream');
if (!state.endEmitted) {
state.ended = true;
@@ -962,15 +866,15 @@ function endReadableNT(state, stream) {
}
}
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
}
-function indexOf (xs, x) {
+function indexOf(xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
-}
+}
\ No newline at end of file
diff --git a/lib/_stream_transform.js b/lib/_stream_transform.js
index 8c5f260139..625cdc1769 100644
--- a/lib/_stream_transform.js
+++ b/lib/_stream_transform.js
@@ -53,9 +53,8 @@ util.inherits = require('inherits');
util.inherits(Transform, Duplex);
-
function TransformState(stream) {
- this.afterTransform = function(er, data) {
+ this.afterTransform = function (er, data) {
return afterTransform(stream, er, data);
};
@@ -72,14 +71,12 @@ function afterTransform(stream, er, data) {
var cb = ts.writecb;
- if (!cb)
- return stream.emit('error', new Error('no writecb in Transform class'));
+ if (!cb) return stream.emit('error', new Error('no writecb in Transform class'));
ts.writechunk = null;
ts.writecb = null;
- if (data !== null && data !== undefined)
- stream.push(data);
+ if (data !== null && data !== undefined) stream.push(data);
cb(er);
@@ -90,10 +87,8 @@ function afterTransform(stream, er, data) {
}
}
-
function Transform(options) {
- if (!(this instanceof Transform))
- return new Transform(options);
+ if (!(this instanceof Transform)) return new Transform(options);
Duplex.call(this, options);
@@ -111,24 +106,19 @@ function Transform(options) {
this._readableState.sync = false;
if (options) {
- if (typeof options.transform === 'function')
- this._transform = options.transform;
+ if (typeof options.transform === 'function') this._transform = options.transform;
- if (typeof options.flush === 'function')
- this._flush = options.flush;
+ if (typeof options.flush === 'function') this._flush = options.flush;
}
- this.once('prefinish', function() {
- if (typeof this._flush === 'function')
- this._flush(function(er) {
- done(stream, er);
- });
- else
- done(stream);
+ this.once('prefinish', function () {
+ if (typeof this._flush === 'function') this._flush(function (er) {
+ done(stream, er);
+ });else done(stream);
});
}
-Transform.prototype.push = function(chunk, encoding) {
+Transform.prototype.push = function (chunk, encoding) {
this._transformState.needTransform = false;
return Duplex.prototype.push.call(this, chunk, encoding);
};
@@ -143,28 +133,25 @@ Transform.prototype.push = function(chunk, encoding) {
// Call `cb(err)` when you are done with this chunk. If you pass
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
-Transform.prototype._transform = function(chunk, encoding, cb) {
+Transform.prototype._transform = function (chunk, encoding, cb) {
throw new Error('not implemented');
};
-Transform.prototype._write = function(chunk, encoding, cb) {
+Transform.prototype._write = function (chunk, encoding, cb) {
var ts = this._transformState;
ts.writecb = cb;
ts.writechunk = chunk;
ts.writeencoding = encoding;
if (!ts.transforming) {
var rs = this._readableState;
- if (ts.needTransform ||
- rs.needReadable ||
- rs.length < rs.highWaterMark)
- this._read(rs.highWaterMark);
+ if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
}
};
// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
-Transform.prototype._read = function(n) {
+Transform.prototype._read = function (n) {
var ts = this._transformState;
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
@@ -177,21 +164,17 @@ Transform.prototype._read = function(n) {
}
};
-
function done(stream, er) {
- if (er)
- return stream.emit('error', er);
+ if (er) return stream.emit('error', er);
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var ts = stream._transformState;
- if (ws.length)
- throw new Error('calling transform done when ws.length != 0');
+ if (ws.length) throw new Error('calling transform done when ws.length != 0');
- if (ts.transforming)
- throw new Error('calling transform done when still transforming');
+ if (ts.transforming) throw new Error('calling transform done when still transforming');
return stream.push(null);
-}
+}
\ No newline at end of file
diff --git a/lib/_stream_writable.js b/lib/_stream_writable.js
index 58773d38f8..b93312f6be 100644
--- a/lib/_stream_writable.js
+++ b/lib/_stream_writable.js
@@ -10,36 +10,32 @@ module.exports = Writable;
var processNextTick = require('process-nextick-args');
/**/
-
/**/
var Buffer = require('buffer').Buffer;
/**/
Writable.WritableState = WritableState;
-
/**/
var util = require('core-util-is');
util.inherits = require('inherits');
/**/
-
/**/
var internalUtil = {
deprecate: require('util-deprecate')
};
/**/
-
-
/**/
var Stream;
-(function (){try{
- Stream = require('st' + 'ream');
-}catch(_){}finally{
- if (!Stream)
- Stream = require('events').EventEmitter;
-}}())
+(function () {
+ try {
+ Stream = require('st' + 'ream');
+ } catch (_) {} finally {
+ if (!Stream) Stream = require('events').EventEmitter;
+ }
+})();
/**/
var Buffer = require('buffer').Buffer;
@@ -65,18 +61,17 @@ function WritableState(options, stream) {
// contains buffers or objects.
this.objectMode = !!options.objectMode;
- if (stream instanceof Duplex)
- this.objectMode = this.objectMode || !!options.writableObjectMode;
+ if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
// the point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
// the entire buffer is not flushed immediately on write()
var hwm = options.highWaterMark;
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
- this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;
+ this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~~this.highWaterMark;
+ this.highWaterMark = ~ ~this.highWaterMark;
this.needDrain = false;
// at the start of calling end()
@@ -120,7 +115,7 @@ function WritableState(options, stream) {
this.bufferProcessing = false;
// the callback that's passed to _write(chunk,cb)
- this.onwrite = function(er) {
+ this.onwrite = function (er) {
onwrite(stream, er);
};
@@ -143,6 +138,14 @@ function WritableState(options, stream) {
// True if the error was already emitted and should not be thrown again
this.errorEmitted = false;
+
+ // count buffered requests
+ this.bufferedRequestCount = 0;
+
+ // create the two objects needed to store the corked requests
+ // they are not a linked list, as no new elements are inserted in there
+ this.corkedRequestsFree = new CorkedRequest(this);
+ this.corkedRequestsFree.next = new CorkedRequest(this);
}
WritableState.prototype.getBuffer = function writableStateGetBuffer() {
@@ -155,15 +158,15 @@ WritableState.prototype.getBuffer = function writableStateGetBuffer() {
return out;
};
-(function (){try {
-Object.defineProperty(WritableState.prototype, 'buffer', {
- get: internalUtil.deprecate(function() {
- return this.getBuffer();
- }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' +
- 'instead.')
-});
-}catch(_){}}());
-
+(function () {
+ try {
+ Object.defineProperty(WritableState.prototype, 'buffer', {
+ get: internalUtil.deprecate(function () {
+ return this.getBuffer();
+ }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.')
+ });
+ } catch (_) {}
+})();
var Duplex;
function Writable(options) {
@@ -171,8 +174,7 @@ function Writable(options) {
// Writable ctor is applied to Duplexes, though they're not
// instanceof Writable, they're instanceof Readable.
- if (!(this instanceof Writable) && !(this instanceof Duplex))
- return new Writable(options);
+ if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options);
this._writableState = new WritableState(options, this);
@@ -180,22 +182,19 @@ function Writable(options) {
this.writable = true;
if (options) {
- if (typeof options.write === 'function')
- this._write = options.write;
+ if (typeof options.write === 'function') this._write = options.write;
- if (typeof options.writev === 'function')
- this._writev = options.writev;
+ if (typeof options.writev === 'function') this._writev = options.writev;
}
Stream.call(this);
}
// Otherwise people can pipe Writable streams, which is just wrong.
-Writable.prototype.pipe = function() {
+Writable.prototype.pipe = function () {
this.emit('error', new Error('Cannot pipe. Not readable.'));
};
-
function writeAfterEnd(stream, cb) {
var er = new Error('write after end');
// TODO: defer error events consistently everywhere, not just the cb
@@ -211,11 +210,7 @@ function writeAfterEnd(stream, cb) {
function validChunk(stream, state, chunk, cb) {
var valid = true;
- if (!(Buffer.isBuffer(chunk)) &&
- typeof chunk !== 'string' &&
- chunk !== null &&
- chunk !== undefined &&
- !state.objectMode) {
+ if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
var er = new TypeError('Invalid non-string/buffer chunk');
stream.emit('error', er);
processNextTick(cb, er);
@@ -224,7 +219,7 @@ function validChunk(stream, state, chunk, cb) {
return valid;
}
-Writable.prototype.write = function(chunk, encoding, cb) {
+Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
@@ -233,17 +228,11 @@ Writable.prototype.write = function(chunk, encoding, cb) {
encoding = null;
}
- if (Buffer.isBuffer(chunk))
- encoding = 'buffer';
- else if (!encoding)
- encoding = state.defaultEncoding;
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
- if (typeof cb !== 'function')
- cb = nop;
+ if (typeof cb !== 'function') cb = nop;
- if (state.ended)
- writeAfterEnd(this, cb);
- else if (validChunk(this, state, chunk, cb)) {
+ if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) {
state.pendingcb++;
ret = writeOrBuffer(this, state, chunk, encoding, cb);
}
@@ -251,42 +240,31 @@ Writable.prototype.write = function(chunk, encoding, cb) {
return ret;
};
-Writable.prototype.cork = function() {
+Writable.prototype.cork = function () {
var state = this._writableState;
state.corked++;
};
-Writable.prototype.uncork = function() {
+Writable.prototype.uncork = function () {
var state = this._writableState;
if (state.corked) {
state.corked--;
- if (!state.writing &&
- !state.corked &&
- !state.finished &&
- !state.bufferProcessing &&
- state.bufferedRequest)
- clearBuffer(this, state);
+ if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
}
};
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// node::ParseEncoding() requires lower case.
- if (typeof encoding === 'string')
- encoding = encoding.toLowerCase();
- if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64',
-'ucs2', 'ucs-2','utf16le', 'utf-16le', 'raw']
-.indexOf((encoding + '').toLowerCase()) > -1))
- throw new TypeError('Unknown encoding: ' + encoding);
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase();
+ if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
this._writableState.defaultEncoding = encoding;
};
function decodeChunk(state, chunk, encoding) {
- if (!state.objectMode &&
- state.decodeStrings !== false &&
- typeof chunk === 'string') {
+ if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
chunk = new Buffer(chunk, encoding);
}
return chunk;
@@ -298,16 +276,14 @@ function decodeChunk(state, chunk, encoding) {
function writeOrBuffer(stream, state, chunk, encoding, cb) {
chunk = decodeChunk(state, chunk, encoding);
- if (Buffer.isBuffer(chunk))
- encoding = 'buffer';
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
var ret = state.length < state.highWaterMark;
// we must ensure that previous needDrain will not be reset to false.
- if (!ret)
- state.needDrain = true;
+ if (!ret) state.needDrain = true;
if (state.writing || state.corked) {
var last = state.lastBufferedRequest;
@@ -317,6 +293,7 @@ function writeOrBuffer(stream, state, chunk, encoding, cb) {
} else {
state.bufferedRequest = state.lastBufferedRequest;
}
+ state.bufferedRequestCount += 1;
} else {
doWrite(stream, state, false, len, chunk, encoding, cb);
}
@@ -329,19 +306,13 @@ function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writecb = cb;
state.writing = true;
state.sync = true;
- if (writev)
- stream._writev(chunk, state.onwrite);
- else
- stream._write(chunk, encoding, state.onwrite);
+ if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
state.sync = false;
}
function onwriteError(stream, state, sync, er, cb) {
--state.pendingcb;
- if (sync)
- processNextTick(cb, er);
- else
- cb(er);
+ if (sync) processNextTick(cb, er);else cb(er);
stream._writableState.errorEmitted = true;
stream.emit('error', er);
@@ -361,16 +332,11 @@ function onwrite(stream, er) {
onwriteStateUpdate(state);
- if (er)
- onwriteError(stream, state, sync, er, cb);
- else {
+ if (er) onwriteError(stream, state, sync, er, cb);else {
// Check if we're actually ready to finish, but don't emit yet
var finished = needFinish(state);
- if (!finished &&
- !state.corked &&
- !state.bufferProcessing &&
- state.bufferedRequest) {
+ if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
clearBuffer(stream, state);
}
@@ -383,8 +349,7 @@ function onwrite(stream, er) {
}
function afterWrite(stream, state, finished, cb) {
- if (!finished)
- onwriteDrain(stream, state);
+ if (!finished) onwriteDrain(stream, state);
state.pendingcb--;
cb();
finishMaybe(stream, state);
@@ -400,7 +365,6 @@ function onwriteDrain(stream, state) {
}
}
-
// if there's something in the buffer waiting, then process it
function clearBuffer(stream, state) {
state.bufferProcessing = true;
@@ -408,26 +372,26 @@ function clearBuffer(stream, state) {
if (stream._writev && entry && entry.next) {
// Fast case, write everything using _writev()
- var buffer = [];
- var cbs = [];
+ var l = state.bufferedRequestCount;
+ var buffer = new Array(l);
+ var holder = state.corkedRequestsFree;
+ holder.entry = entry;
+
+ var count = 0;
while (entry) {
- cbs.push(entry.callback);
- buffer.push(entry);
+ buffer[count] = entry;
entry = entry.next;
+ count += 1;
}
- // count the one we are adding, as well.
- // TODO(isaacs) clean this up
+ doWrite(stream, state, true, state.length, buffer, '', holder.finish);
+
+ // doWrite is always async, defer these to save a bit of time
+ // as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
- doWrite(stream, state, true, state.length, buffer, '', function(err) {
- for (var i = 0; i < cbs.length; i++) {
- state.pendingcb--;
- cbs[i](err);
- }
- });
-
- // Clear buffer
+ state.corkedRequestsFree = holder.next;
+ holder.next = null;
} else {
// Slow case, write chunks one-by-one
while (entry) {
@@ -447,20 +411,21 @@ function clearBuffer(stream, state) {
}
}
- if (entry === null)
- state.lastBufferedRequest = null;
+ if (entry === null) state.lastBufferedRequest = null;
}
+
+ state.bufferedRequestCount = 0;
state.bufferedRequest = entry;
state.bufferProcessing = false;
}
-Writable.prototype._write = function(chunk, encoding, cb) {
+Writable.prototype._write = function (chunk, encoding, cb) {
cb(new Error('not implemented'));
};
Writable.prototype._writev = null;
-Writable.prototype.end = function(chunk, encoding, cb) {
+Writable.prototype.end = function (chunk, encoding, cb) {
var state = this._writableState;
if (typeof chunk === 'function') {
@@ -472,8 +437,7 @@ Writable.prototype.end = function(chunk, encoding, cb) {
encoding = null;
}
- if (chunk !== null && chunk !== undefined)
- this.write(chunk, encoding);
+ if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
// .end() fully uncorks
if (state.corked) {
@@ -482,17 +446,11 @@ Writable.prototype.end = function(chunk, encoding, cb) {
}
// ignore unnecessary end() calls.
- if (!state.ending && !state.finished)
- endWritable(this, state, cb);
+ if (!state.ending && !state.finished) endWritable(this, state, cb);
};
-
function needFinish(state) {
- return (state.ending &&
- state.length === 0 &&
- state.bufferedRequest === null &&
- !state.finished &&
- !state.writing);
+ return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
}
function prefinish(stream, state) {
@@ -520,11 +478,33 @@ function endWritable(stream, state, cb) {
state.ending = true;
finishMaybe(stream, state);
if (cb) {
- if (state.finished)
- processNextTick(cb);
- else
- stream.once('finish', cb);
+ if (state.finished) processNextTick(cb);else stream.once('finish', cb);
}
state.ended = true;
stream.writable = false;
}
+
+// It seems a linked list but it is not
+// there will be only 2 of these for each stream
+function CorkedRequest(state) {
+ var _this = this;
+
+ this.next = null;
+ this.entry = null;
+
+ this.finish = function (err) {
+ var entry = _this.entry;
+ _this.entry = null;
+ while (entry) {
+ var cb = entry.callback;
+ state.pendingcb--;
+ cb(err);
+ entry = entry.next;
+ }
+ if (state.corkedRequestsFree) {
+ state.corkedRequestsFree.next = _this;
+ } else {
+ state.corkedRequestsFree = _this;
+ }
+ };
+}
\ No newline at end of file
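
The clearBuffer() rewrite above drops the per-flush `buffer`/`cbs` arrays: the buffered entries are handed to _writev() in one batch, and the pooled CorkedRequest's finish() fires each entry's callback and then returns the holder to a small free list, so at most two holders ever exist per stream. A stripped-down, self-contained sketch of that recycling pattern (illustrative only — simplified names, none of the stream bookkeeping):

// a holder whose finish() drains a linked list of callbacks, then recycles itself
function Holder(pool) {
  var self = this;
  this.next = null;   // next free holder in the pool
  this.entry = null;  // head of the buffered-write list for this flush
  this.finish = function (err) {
    var entry = self.entry;
    self.entry = null;
    while (entry) {                 // run each buffered callback exactly once
      entry.callback(err);
      entry = entry.next;
    }
    // return the holder to the pool instead of allocating a new one next time
    if (pool.free) pool.free.next = self;
    else pool.free = self;
  };
}

var pool = { free: null };
pool.free = new Holder(pool);

// simulate one batched flush: two buffered writes share a single holder
var holder = pool.free;
pool.free = holder.next;
holder.next = null;
holder.entry = { callback: function () { console.log('write 1 done'); },
                 next: { callback: function () { console.log('write 2 done'); }, next: null } };
holder.finish(null);   // drains both callbacks, then puts the holder back
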
diff --git a/test/common.js b/test/common.js
index f28996fa17..ef0793cd45 100644
--- a/test/common.js
+++ b/test/common.js
@@ -1,7 +1,8 @@
/**/
- var util = require('util');
- for (var i in util) exports[i] = util[i];
- /**//**/
+var util = require('util');
+for (var i in util) {
+ exports[i] = util[i];
+} /**/ /**/
if (!global.setImmediate) {
global.setImmediate = function setImmediate(fn) {
return setTimeout(fn.bind.apply(fn, arguments), 0);
@@ -9,7 +10,7 @@ if (!global.setImmediate) {
}
if (!global.clearImmediate) {
global.clearImmediate = function clearImmediate(i) {
- return clearTimeout(i);
+ return clearTimeout(i);
};
}
/**/
@@ -19,9 +20,10 @@ if (!global.clearImmediate) {
/**/
var objectKeys = objectKeys || function (obj) {
var keys = [];
- for (var key in obj) keys.push(key);
- return keys;
-}
+ for (var key in obj) {
+ keys.push(key);
+ }return keys;
+};
/**/
var path = require('path');
@@ -36,9 +38,7 @@ var util = require('core-util-is');
util.inherits = require('inherits');
/**/
-
-var testRoot = path.resolve(process.env.NODE_TEST_DIR ||
- path.dirname(__filename));
+var testRoot = path.resolve(process.env.NODE_TEST_DIR || path.dirname(__filename));
exports.testDir = path.dirname(__filename);
exports.fixturesDir = path.join(exports.testDir, 'fixtures');
@@ -47,9 +47,7 @@ exports.tmpDirName = 'tmp';
exports.PORT = +process.env.NODE_COMMON_PORT || 12346;
exports.isWindows = process.platform === 'win32';
exports.isAix = process.platform === 'aix';
-exports.isLinuxPPCBE = (process.platform === 'linux') &&
- (process.arch === 'ppc64') &&
- (os.endianness() === 'BE');
+exports.isLinuxPPCBE = process.platform === 'linux' && process.arch === 'ppc64' && os.endianness() === 'BE';
exports.isSunOS = process.platform === 'sunos';
exports.isFreeBSD = process.platform === 'freebsd';
@@ -59,22 +57,15 @@ function rimrafSync(p) {
try {
var st = fs.lstatSync(p);
} catch (e) {
- if (e.code === 'ENOENT')
- return;
+ if (e.code === 'ENOENT') return;
}
try {
- if (st && st.isDirectory())
- rmdirSync(p, null);
- else
- fs.unlinkSync(p);
+ if (st && st.isDirectory()) rmdirSync(p, null);else fs.unlinkSync(p);
} catch (e) {
- if (e.code === 'ENOENT')
- return;
- if (e.code === 'EPERM')
- return rmdirSync(p, e);
- if (e.code !== 'EISDIR')
- throw e;
+ if (e.code === 'ENOENT') return;
+ if (e.code === 'EPERM') return rmdirSync(p, e);
+ if (e.code !== 'EISDIR') throw e;
rmdirSync(p, e);
}
}
@@ -83,10 +74,9 @@ function rmdirSync(p, originalEr) {
try {
fs.rmdirSync(p);
} catch (e) {
- if (e.code === 'ENOTDIR')
- throw originalEr;
+ if (e.code === 'ENOTDIR') throw originalEr;
if (e.code === 'ENOTEMPTY' || e.code === 'EEXIST' || e.code === 'EPERM') {
- forEach(fs.readdirSync(p), function(f) {
+ forEach(fs.readdirSync(p), function (f) {
rimrafSync(path.join(p, f));
});
fs.rmdirSync(p);
@@ -94,7 +84,7 @@ function rmdirSync(p, originalEr) {
}
}
-exports.refreshTmpDir = function() {
+exports.refreshTmpDir = function () {
rimrafSync(exports.tmpDir);
fs.mkdirSync(exports.tmpDir);
};
@@ -110,104 +100,92 @@ var inFreeBSDJail = null;
var localhostIPv4 = null;
exports.localIPv6Hosts = [
- // Debian/Ubuntu
- 'ip6-localhost',
- 'ip6-loopback',
+// Debian/Ubuntu
+'ip6-localhost', 'ip6-loopback',
- // SUSE
- 'ipv6-localhost',
- 'ipv6-loopback',
+// SUSE
+'ipv6-localhost', 'ipv6-loopback',
- // Typically universal
- 'localhost',
-];
+// Typically universal
+'localhost'];
/**/if (!process.browser) {
-Object.defineProperty(exports, 'inFreeBSDJail', {
- get: function() {
- if (inFreeBSDJail !== null) return inFreeBSDJail;
-
- if (process.platform === 'freebsd' &&
- child_process.execSync('sysctl -n security.jail.jailed').toString() ===
- '1\n') {
- inFreeBSDJail = true;
- } else {
- inFreeBSDJail = false;
- }
- return inFreeBSDJail;
- }
-});
-}/**/
-
+ Object.defineProperty(exports, 'inFreeBSDJail', {
+ get: function () {
+ if (inFreeBSDJail !== null) return inFreeBSDJail;
-/**/if (!process.browser) {
-Object.defineProperty(exports, 'localhostIPv4', {
- get: function() {
- if (localhostIPv4 !== null) return localhostIPv4;
-
- if (exports.inFreeBSDJail) {
- // Jailed network interfaces are a bit special - since we need to jump
- // through loops, as well as this being an exception case, assume the
- // user will provide this instead.
- if (process.env.LOCALHOST) {
- localhostIPv4 = process.env.LOCALHOST;
+ if (process.platform === 'freebsd' && child_process.execSync('sysctl -n security.jail.jailed').toString() === '1\n') {
+ inFreeBSDJail = true;
} else {
- console.error('Looks like we\'re in a FreeBSD Jail. ' +
- 'Please provide your default interface address ' +
- 'as LOCALHOST or expect some tests to fail.');
+ inFreeBSDJail = false;
}
+ return inFreeBSDJail;
}
+ });
+} /**/
- if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1';
+/**/if (!process.browser) {
+ Object.defineProperty(exports, 'localhostIPv4', {
+ get: function () {
+ if (localhostIPv4 !== null) return localhostIPv4;
+
+ if (exports.inFreeBSDJail) {
+ // Jailed network interfaces are a bit special - since we need to jump
+ // through loops, as well as this being an exception case, assume the
+ // user will provide this instead.
+ if (process.env.LOCALHOST) {
+ localhostIPv4 = process.env.LOCALHOST;
+ } else {
+ console.error('Looks like we\'re in a FreeBSD Jail. ' + 'Please provide your default interface address ' + 'as LOCALHOST or expect some tests to fail.');
+ }
+ }
- return localhostIPv4;
- }
-});
-}/**/
+ if (localhostIPv4 === null) localhostIPv4 = '127.0.0.1';
+ return localhostIPv4;
+ }
+ });
+} /**/
// opensslCli defined lazily to reduce overhead of spawnSync
/**/if (!process.browser) {
-Object.defineProperty(exports, 'opensslCli', {get: function() {
- if (opensslCli !== null) return opensslCli;
-
- if (process.config.variables.node_shared_openssl) {
- // use external command
- opensslCli = 'openssl';
- } else {
- // use command built from sources included in Node.js repository
- opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli');
- }
+ Object.defineProperty(exports, 'opensslCli', { get: function () {
+ if (opensslCli !== null) return opensslCli;
- if (exports.isWindows) opensslCli += '.exe';
+ if (process.config.variables.node_shared_openssl) {
+ // use external command
+ opensslCli = 'openssl';
+ } else {
+ // use command built from sources included in Node.js repository
+ opensslCli = path.join(path.dirname(process.execPath), 'openssl-cli');
+ }
- var openssl_cmd = child_process.spawnSync(opensslCli, ['version']);
- if (openssl_cmd.status !== 0 || openssl_cmd.error !== undefined) {
- // openssl command cannot be executed
- opensslCli = false;
- }
- return opensslCli;
-}, enumerable: true });
-}/**/
+ if (exports.isWindows) opensslCli += '.exe';
+ var openssl_cmd = child_process.spawnSync(opensslCli, ['version']);
+ if (openssl_cmd.status !== 0 || openssl_cmd.error !== undefined) {
+ // openssl command cannot be executed
+ opensslCli = false;
+ }
+ return opensslCli;
+ }, enumerable: true });
+} /**/
/**/if (!process.browser) {
-Object.defineProperty(exports, 'hasCrypto', {
- get: function() {
- return process.versions.openssl ? true : false;
- }
-});
-}/**/
-
+ Object.defineProperty(exports, 'hasCrypto', {
+ get: function () {
+ return process.versions.openssl ? true : false;
+ }
+ });
+} /**/
/**/if (!process.browser) {
-Object.defineProperty(exports, 'hasFipsCrypto', {
- get: function() {
- return process.config.variables.openssl_fips ? true : false;
- }
-});
-}/**/
-
+ Object.defineProperty(exports, 'hasFipsCrypto', {
+ get: function () {
+ return process.config.variables.openssl_fips ? true : false;
+ }
+ });
+} /**/
if (exports.isWindows) {
exports.PIPE = '\\\\.\\pipe\\libuv-test';
@@ -221,43 +199,27 @@ if (exports.isWindows) {
if (exports.isWindows) {
exports.faketimeCli = false;
} else {
- exports.faketimeCli = path.join(__dirname, '..', 'tools', 'faketime', 'src',
- 'faketime');
+ exports.faketimeCli = path.join(__dirname, '..', 'tools', 'faketime', 'src', 'faketime');
}
var ifaces = os.networkInterfaces();
-exports.hasIPv6 = objectKeys(ifaces).some(function(name) {
- return /lo/.test(name) && ifaces[name].some(function(info) {
- return info.family === 'IPv6';
- });
+exports.hasIPv6 = objectKeys(ifaces).some(function (name) {
+ return (/lo/.test(name) && ifaces[name].some(function (info) {
+ return info.family === 'IPv6';
+ })
+ );
});
-function protoCtrChain(o) {
- var result = [];
- for (; o; o = o.__proto__) { result.push(o.constructor); }
- return result.join();
-}
-
-exports.indirectInstanceOf = function(obj, cls) {
- if (obj instanceof cls) { return true; }
- var clsChain = protoCtrChain(cls.prototype);
- var objChain = protoCtrChain(obj);
- return objChain.slice(-clsChain.length) === clsChain;
-};
-
-
-exports.ddCommand = function(filename, kilobytes) {
+exports.ddCommand = function (filename, kilobytes) {
if (exports.isWindows) {
var p = path.resolve(exports.fixturesDir, 'create-file.js');
- return '"' + process.argv[0] + '" "' + p + '" "' +
- filename + '" ' + (kilobytes * 1024);
+ return '"' + process.argv[0] + '" "' + p + '" "' + filename + '" ' + kilobytes * 1024;
} else {
return 'dd if=/dev/zero of="' + filename + '" bs=1024 count=' + kilobytes;
}
};
-
-exports.spawnCat = function(options) {
+exports.spawnCat = function (options) {
var spawn = require('child_process').spawn;
if (exports.isWindows) {
@@ -267,8 +229,7 @@ exports.spawnCat = function(options) {
}
};
-
-exports.spawnSyncCat = function(options) {
+exports.spawnSyncCat = function (options) {
var spawnSync = require('child_process').spawnSync;
if (exports.isWindows) {
@@ -278,8 +239,7 @@ exports.spawnSyncCat = function(options) {
}
};
-
-exports.spawnPwd = function(options) {
+exports.spawnPwd = function (options) {
var spawn = require('child_process').spawn;
if (exports.isWindows) {
@@ -289,35 +249,22 @@ exports.spawnPwd = function(options) {
}
};
-exports.platformTimeout = function(ms) {
- if (process.config.target_defaults.default_configuration === 'Debug')
- ms = 2 * ms;
+exports.platformTimeout = function (ms) {
+ if (process.config.target_defaults.default_configuration === 'Debug') ms = 2 * ms;
- if (process.arch !== 'arm')
- return ms;
+ if (process.arch !== 'arm') return ms;
var armv = process.config.variables.arm_version;
- if (armv === '6')
- return 7 * ms; // ARMv6
+ if (armv === '6') return 7 * ms; // ARMv6
- if (armv === '7')
- return 2 * ms; // ARMv7
+ if (armv === '7') return 2 * ms; // ARMv7
return ms; // ARMv8+
};
-var knownGlobals = [setTimeout,
- setInterval,
- setImmediate,
- clearTimeout,
- clearInterval,
- clearImmediate,
- console,
- constructor, // Enumerable in V8 3.21.
- Buffer,
- process,
- global];
+var knownGlobals = [setTimeout, setInterval, setImmediate, clearTimeout, clearInterval, clearImmediate, console, constructor, // Enumerable in V8 3.21.
+Buffer, process, global];
if (global.gc) {
knownGlobals.push(gc);
@@ -351,21 +298,20 @@ if (global.LTTNG_HTTP_SERVER_RESPONSE) {
}
/**/if (!process.browser) {
-if (global.ArrayBuffer) {
- knownGlobals.push(ArrayBuffer);
- knownGlobals.push(Int8Array);
- knownGlobals.push(Uint8Array);
- knownGlobals.push(Uint8ClampedArray);
- knownGlobals.push(Int16Array);
- knownGlobals.push(Uint16Array);
- knownGlobals.push(Int32Array);
- knownGlobals.push(Uint32Array);
- knownGlobals.push(Float32Array);
- knownGlobals.push(Float64Array);
- knownGlobals.push(DataView);
-}
-}/**/
-
+ if (global.ArrayBuffer) {
+ knownGlobals.push(ArrayBuffer);
+ knownGlobals.push(Int8Array);
+ knownGlobals.push(Uint8Array);
+ knownGlobals.push(Uint8ClampedArray);
+ knownGlobals.push(Int16Array);
+ knownGlobals.push(Uint16Array);
+ knownGlobals.push(Int32Array);
+ knownGlobals.push(Uint32Array);
+ knownGlobals.push(Float32Array);
+ knownGlobals.push(Float64Array);
+ knownGlobals.push(DataView);
+ }
+} /**/
// Harmony features.
if (global.Proxy) {
@@ -376,30 +322,25 @@ if (global.Symbol) {
knownGlobals.push(Symbol);
}
- /**/
- if (typeof constructor == 'function')
- knownGlobals.push(constructor);
- if (typeof DTRACE_NET_SOCKET_READ == 'function')
- knownGlobals.push(DTRACE_NET_SOCKET_READ);
- if (typeof DTRACE_NET_SOCKET_WRITE == 'function')
- knownGlobals.push(DTRACE_NET_SOCKET_WRITE);
- /**/
+/**/
+if (typeof constructor == 'function') knownGlobals.push(constructor);
+if (typeof DTRACE_NET_SOCKET_READ == 'function') knownGlobals.push(DTRACE_NET_SOCKET_READ);
+if (typeof DTRACE_NET_SOCKET_WRITE == 'function') knownGlobals.push(DTRACE_NET_SOCKET_WRITE);
+/**/
function leakedGlobals() {
var leaked = [];
- for (var val in global)
- if (-1 === knownGlobals.indexOf(global[val]))
- leaked.push(val);
-
- return leaked;
+ for (var val in global) {
+ if (-1 === knownGlobals.indexOf(global[val])) leaked.push(val);
+ }return leaked;
}
exports.leakedGlobals = leakedGlobals;
// Turn this off if the test should not check for global leaks.
exports.globalCheck = true;
-process.on('exit', function() {
+process.on('exit', function () {
if (!exports.globalCheck) return;
var leaked = leakedGlobals();
if (leaked.length > 0) {
@@ -408,36 +349,30 @@ process.on('exit', function() {
}
});
-
var mustCallChecks = [];
-
function runCallChecks(exitCode) {
if (exitCode !== 0) return;
- var failed = mustCallChecks.filter(function(context) {
+ var failed = mustCallChecks.filter(function (context) {
return context.actual !== context.expected;
});
- forEach(failed, function(context) {
- console.log('Mismatched %s function calls. Expected %d, actual %d.',
- context.name,
- context.expected,
- context.actual);
+ forEach(failed, function (context) {
+ console.log('Mismatched %s function calls. Expected %d, actual %d.', context.name, context.expected, context.actual);
console.log(context.stack.split('\n').slice(2).join('\n'));
});
if (failed.length) process.exit(1);
}
-
-exports.mustCall = function(fn, expected) {
+exports.mustCall = function (fn, expected) {
if (typeof expected !== 'number') expected = 1;
var context = {
expected: expected,
actual: 0,
- stack: (new Error()).stack,
+ stack: new Error().stack,
name: fn.name || ''
};
@@ -446,7 +381,7 @@ exports.mustCall = function(fn, expected) {
mustCallChecks.push(context);
- return function() {
+ return function () {
context.actual++;
return fn.apply(this, arguments);
};
@@ -454,8 +389,7 @@ exports.mustCall = function(fn, expected) {
var etcServicesFileName = path.join('/etc', 'services');
if (exports.isWindows) {
- etcServicesFileName = path.join(process.env.SystemRoot, 'System32', 'drivers',
- 'etc', 'services');
+ etcServicesFileName = path.join(process.env.SystemRoot, 'System32', 'drivers', 'etc', 'services');
}
/*
@@ -484,8 +418,7 @@ exports.getServiceName = function getServiceName(port, protocol) {
var serviceName = port.toString();
try {
- var servicesContent = fs.readFileSync(etcServicesFileName,
- { encoding: 'utf8'});
+ var servicesContent = fs.readFileSync(etcServicesFileName, { encoding: 'utf8' });
var regexp = '^(\w+)\s+\s' + port + '/' + protocol + '\s';
var re = new RegExp(regexp, 'm');
@@ -509,7 +442,7 @@ exports.hasMultiLocalhost = function hasMultiLocalhost() {
return ret === 0;
};
-exports.fileExists = function(pathname) {
+exports.fileExists = function (pathname) {
try {
fs.accessSync(pathname);
return true;
@@ -518,17 +451,17 @@ exports.fileExists = function(pathname) {
}
};
-exports.fail = function(msg) {
+exports.fail = function (msg) {
assert.fail(null, null, msg);
};
-
// A stream to push an array into a REPL
function ArrayStream() {
- this.run = function(data) {
- var self = this;
- forEach(data, function(line) {
- self.emit('data', line + '\n');
+ this.run = function (data) {
+ var _this = this;
+
+ forEach(data, function (line) {
+ _this.emit('data', line + '\n');
});
};
}
@@ -537,9 +470,9 @@ util.inherits(ArrayStream, stream.Stream);
exports.ArrayStream = ArrayStream;
ArrayStream.prototype.readable = true;
ArrayStream.prototype.writable = true;
-ArrayStream.prototype.pause = function() {};
-ArrayStream.prototype.resume = function() {};
-ArrayStream.prototype.write = function() {};
+ArrayStream.prototype.pause = function () {};
+ArrayStream.prototype.resume = function () {};
+ArrayStream.prototype.write = function () {};
// Returns true if the exit code "exitCode" and/or signal name "signal"
// represent the exit code and/or signal name of a node process that aborted,
@@ -558,8 +491,7 @@ exports.nodeProcessAborted = function nodeProcessAborted(exitCode, signal) {
// On Windows, v8's base::OS::Abort triggers an access violation,
// which corresponds to exit code 3221225477 (0xC0000005)
- if (process.platform === 'win32')
- expectedExitCodes = [3221225477];
+ if (process.platform === 'win32') expectedExitCodes = [3221225477];
// When using --abort-on-uncaught-exception, V8 will use
// base::OS::Abort to terminate the process.
@@ -575,7 +507,7 @@ exports.nodeProcessAborted = function nodeProcessAborted(exitCode, signal) {
}
};
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
@@ -583,8 +515,10 @@ function forEach (xs, f) {
if (!util._errnoException) {
var uv;
- util._errnoException = function(err, syscall) {
- if (util.isUndefined(uv)) try { uv = process.binding('uv'); } catch (e) {}
+ util._errnoException = function (err, syscall) {
+ if (util.isUndefined(uv)) try {
+ uv = process.binding('uv');
+ } catch (e) {}
var errname = uv ? uv.errname(err) : '';
var e = new Error(syscall + ' ' + errname);
e.code = errname;
@@ -592,4 +526,4 @@ if (!util._errnoException) {
e.syscall = syscall;
return e;
};
-}
+}
\ No newline at end of file
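
Many of the stream tests below lean on common.mustCall() from this file: it wraps a callback, counts how many times it actually ran, and runCallChecks() compares that count against the expected one at process exit, failing the run on a mismatch. A hypothetical usage snippet (not part of the suite) might look like:

var common = require('../common');
var stream = require('../../');

var w = new stream.Writable();
// fail at exit unless _write runs exactly twice
w._write = common.mustCall(function (chunk, encoding, cb) {
  cb();
}, 2);

w.write('a');
w.write('b');
w.end();
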
diff --git a/test/parallel/test-stream-big-packet.js b/test/parallel/test-stream-big-packet.js
index 82883993a7..2cef21e696 100644
--- a/test/parallel/test-stream-big-packet.js
+++ b/test/parallel/test-stream-big-packet.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var util = require('util');
@@ -10,7 +11,7 @@ function PassThrough() {
stream.Transform.call(this);
}
util.inherits(PassThrough, stream.Transform);
-PassThrough.prototype._transform = function(chunk, encoding, done) {
+PassThrough.prototype._transform = function (chunk, encoding, done) {
this.push(chunk);
done();
};
@@ -19,7 +20,7 @@ function TestStream() {
stream.Transform.call(this);
}
util.inherits(TestStream, stream.Transform);
-TestStream.prototype._transform = function(chunk, encoding, done) {
+TestStream.prototype._transform = function (chunk, encoding, done) {
if (!passed) {
// Char 'a' only exists in the last write
passed = indexOf(chunk.toString(), 'a') >= 0;
@@ -32,7 +33,7 @@ var s2 = new PassThrough();
var s3 = new TestStream();
s1.pipe(s3);
// Don't let s2 auto close which may close s3
-s2.pipe(s3, {end: false});
+s2.pipe(s3, { end: false });
// We must write a buffer larger than highWaterMark
var big = new Buffer(s1._writableState.highWaterMark + 1);
@@ -48,13 +49,13 @@ assert(s2.write('tiny'));
setImmediate(s1.write.bind(s1), 'later');
// Assert after two IO loops when all operations have been done.
-process.on('exit', function() {
+process.on('exit', function () {
assert(passed, 'Large buffer is not handled properly by Writable Stream');
});
-function indexOf (xs, x) {
+function indexOf(xs, x) {
for (var i = 0, l = xs.length; i < l; i++) {
if (xs[i] === x) return i;
}
return -1;
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream-big-push.js b/test/parallel/test-stream-big-push.js
index e24f3bd9ec..7d420d230b 100644
--- a/test/parallel/test-stream-big-push.js
+++ b/test/parallel/test-stream-big-push.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var stream = require('../../');
@@ -13,9 +14,9 @@ var reads = 0;
var eofed = false;
var ended = false;
-r._read = function(n) {
+r._read = function (n) {
if (reads === 0) {
- setTimeout(function() {
+ setTimeout(function () {
r.push(str);
});
reads++;
@@ -30,7 +31,7 @@ r._read = function(n) {
}
};
-r.on('end', function() {
+r.on('end', function () {
ended = true;
});
@@ -44,7 +45,7 @@ assert.equal(chunk, str);
chunk = r.read();
assert.equal(chunk, null);
-r.once('readable', function() {
+r.once('readable', function () {
// this time, we'll get *all* the remaining data, because
// it's been added synchronously, as the read WOULD take
// us below the hwm, and so it triggered a _read() again,
@@ -56,9 +57,9 @@ r.once('readable', function() {
assert.equal(chunk, null);
});
-process.on('exit', function() {
+process.on('exit', function () {
assert(eofed);
assert(ended);
assert.equal(reads, 2);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-duplex.js b/test/parallel/test-stream-duplex.js
index c580b1402d..ff4f431592 100644
--- a/test/parallel/test-stream-duplex.js
+++ b/test/parallel/test-stream-duplex.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -12,21 +13,21 @@ assert(stream._writableState.objectMode);
var written;
var read;
-stream._write = function(obj, _, cb) {
+stream._write = function (obj, _, cb) {
written = obj;
cb();
};
-stream._read = function() {};
+stream._read = function () {};
-stream.on('data', function(obj) {
+stream.on('data', function (obj) {
read = obj;
});
stream.push({ val: 1 });
stream.end({ val: 2 });
-process.on('exit', function() {
+process.on('exit', function () {
assert(read.val === 1);
assert(written.val === 2);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-end-paused.js b/test/parallel/test-stream-end-paused.js
index d507f444d2..6de967cb9d 100644
--- a/test/parallel/test-stream-end-paused.js
+++ b/test/parallel/test-stream-end-paused.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var gotEnd = false;
@@ -8,26 +9,26 @@ var gotEnd = false;
var Readable = require('../../').Readable;
var stream = new Readable();
var calledRead = false;
-stream._read = function() {
+stream._read = function () {
assert(!calledRead);
calledRead = true;
this.push(null);
};
-stream.on('data', function() {
+stream.on('data', function () {
throw new Error('should not ever get data');
});
stream.pause();
-setTimeout(function() {
- stream.on('end', function() {
+setTimeout(function () {
+ stream.on('end', function () {
gotEnd = true;
});
stream.resume();
});
-process.on('exit', function() {
+process.on('exit', function () {
assert(gotEnd);
assert(calledRead);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-ispaused.js b/test/parallel/test-stream-ispaused.js
index 4ae0844ea2..9d17b402cd 100644
--- a/test/parallel/test-stream-ispaused.js
+++ b/test/parallel/test-stream-ispaused.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -21,4 +22,4 @@ assert.ok(!readable.isPaused());
readable.pause();
assert.ok(readable.isPaused());
readable.resume();
-assert.ok(!readable.isPaused());
+assert.ok(!readable.isPaused());
\ No newline at end of file
diff --git a/test/parallel/test-stream-pipe-after-end.js b/test/parallel/test-stream-pipe-after-end.js
index ecdaccf272..0d8f6d3dcc 100644
--- a/test/parallel/test-stream-pipe-after-end.js
+++ b/test/parallel/test-stream-pipe-after-end.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -8,28 +9,25 @@ var util = require('util');
util.inherits(TestReadable, Readable);
function TestReadable(opt) {
- if (!(this instanceof TestReadable))
- return new TestReadable(opt);
+ if (!(this instanceof TestReadable)) return new TestReadable(opt);
Readable.call(this, opt);
this._ended = false;
}
-TestReadable.prototype._read = function(n) {
- if (this._ended)
- this.emit('error', new Error('_read called twice'));
+TestReadable.prototype._read = function (n) {
+ if (this._ended) this.emit('error', new Error('_read called twice'));
this._ended = true;
this.push(null);
};
util.inherits(TestWritable, Writable);
function TestWritable(opt) {
- if (!(this instanceof TestWritable))
- return new TestWritable(opt);
+ if (!(this instanceof TestWritable)) return new TestWritable(opt);
Writable.call(this, opt);
this._written = [];
}
-TestWritable.prototype._write = function(chunk, encoding, cb) {
+TestWritable.prototype._write = function (chunk, encoding, cb) {
this._written.push(chunk);
cb();
};
@@ -43,8 +41,8 @@ var piper = new TestReadable();
// pushes EOF null, and length=0, so this will trigger 'end'
piper.read();
-setTimeout(function() {
- ender.on('end', function() {
+setTimeout(function () {
+ ender.on('end', function () {
enderEnded = true;
});
assert(!enderEnded);
@@ -53,14 +51,14 @@ setTimeout(function() {
var w = new TestWritable();
var writableFinished = false;
- w.on('finish', function() {
+ w.on('finish', function () {
writableFinished = true;
});
piper.pipe(w);
- process.on('exit', function() {
+ process.on('exit', function () {
assert(enderEnded);
assert(writableFinished);
console.log('ok');
});
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-pipe-cleanup-pause.js b/test/parallel/test-stream-pipe-cleanup-pause.js
index 9805bccffc..213cd065ab 100644
--- a/test/parallel/test-stream-pipe-cleanup-pause.js
+++ b/test/parallel/test-stream-pipe-cleanup-pause.js
@@ -1,4 +1,5 @@
'use strict';
+
var common = require('../common');
var stream = require('../../');
@@ -11,27 +12,27 @@ var writer2 = new stream.Writable();
// See: https://github.com/nodejs/node/issues/2323
var buffer = new Buffer(560000);
-reader._read = function(n) {};
+reader._read = function (n) {};
-writer1._write = common.mustCall(function(chunk, encoding, cb) {
+writer1._write = common.mustCall(function (chunk, encoding, cb) {
this.emit('chunk-received');
cb();
}, 1);
-writer1.once('chunk-received', function() {
+writer1.once('chunk-received', function () {
reader.unpipe(writer1);
reader.pipe(writer2);
reader.push(buffer);
- setImmediate(function() {
+ setImmediate(function () {
reader.push(buffer);
- setImmediate(function() {
+ setImmediate(function () {
reader.push(buffer);
});
});
});
-writer2._write = common.mustCall(function(chunk, encoding, cb) {
+writer2._write = common.mustCall(function (chunk, encoding, cb) {
cb();
}, 3);
reader.pipe(writer1);
-reader.push(buffer);
+reader.push(buffer);
\ No newline at end of file
diff --git a/test/parallel/test-stream-pipe-cleanup.js b/test/parallel/test-stream-pipe-cleanup.js
index a1c2249c20..2de4d2233f 100644
--- a/test/parallel/test-stream-pipe-cleanup.js
+++ b/test/parallel/test-stream-pipe-cleanup.js
@@ -7,99 +7,100 @@ var stream = require('../../');
var assert = require('assert');
var util = require('util');
-if (/^v0\.8\./.test(process.version))
- return
-
-function Writable() {
- this.writable = true;
- this.endCalls = 0;
- require('stream').Stream.call(this);
-}
-util.inherits(Writable, require('stream').Stream);
-Writable.prototype.end = function() {
- this.endCalls++;
-};
-
-Writable.prototype.destroy = function() {
- this.endCalls++;
-};
-
-function Readable() {
- this.readable = true;
- require('stream').Stream.call(this);
-}
-util.inherits(Readable, require('stream').Stream);
-
-function Duplex() {
- this.readable = true;
- Writable.call(this);
-}
-util.inherits(Duplex, Writable);
-
-var i = 0;
-var limit = 100;
-
-var w = new Writable();
-
-var r;
-
-for (i = 0; i < limit; i++) {
- r = new Readable();
- r.pipe(w);
- r.emit('end');
-}
-assert.equal(0, r.listeners('end').length);
-assert.equal(limit, w.endCalls);
+(function () {
+ if (/^v0\.8\./.test(process.version)) return;
+
+ function Writable() {
+ this.writable = true;
+ this.endCalls = 0;
+ require('stream').Stream.call(this);
+ }
+ util.inherits(Writable, require('stream').Stream);
+ Writable.prototype.end = function () {
+ this.endCalls++;
+ };
+
+ Writable.prototype.destroy = function () {
+ this.endCalls++;
+ };
+
+ function Readable() {
+ this.readable = true;
+ require('stream').Stream.call(this);
+ }
+ util.inherits(Readable, require('stream').Stream);
+
+ function Duplex() {
+ this.readable = true;
+ Writable.call(this);
+ }
+ util.inherits(Duplex, Writable);
+
+ var i = 0;
+ var limit = 100;
+
+ var w = new Writable();
+
+ var r;
+
+ for (i = 0; i < limit; i++) {
+ r = new Readable();
+ r.pipe(w);
+ r.emit('end');
+ }
+ assert.equal(0, r.listeners('end').length);
+ assert.equal(limit, w.endCalls);
+
+ w.endCalls = 0;
+
+ for (i = 0; i < limit; i++) {
+ r = new Readable();
+ r.pipe(w);
+ r.emit('close');
+ }
+ assert.equal(0, r.listeners('close').length);
+ assert.equal(limit, w.endCalls);
+
+ w.endCalls = 0;
-w.endCalls = 0;
-
-for (i = 0; i < limit; i++) {
r = new Readable();
- r.pipe(w);
- r.emit('close');
-}
-assert.equal(0, r.listeners('close').length);
-assert.equal(limit, w.endCalls);
-
-w.endCalls = 0;
-r = new Readable();
+ for (i = 0; i < limit; i++) {
+ w = new Writable();
+ r.pipe(w);
+ w.emit('close');
+ }
+ assert.equal(0, w.listeners('close').length);
-for (i = 0; i < limit; i++) {
+ r = new Readable();
w = new Writable();
- r.pipe(w);
- w.emit('close');
-}
-assert.equal(0, w.listeners('close').length);
-
-r = new Readable();
-w = new Writable();
-var d = new Duplex();
-r.pipe(d); // pipeline A
-d.pipe(w); // pipeline B
-assert.equal(r.listeners('end').length, 2); // A.onend, A.cleanup
-assert.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup
-assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
-assert.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup
-assert.equal(w.listeners('end').length, 0);
-assert.equal(w.listeners('close').length, 1); // B.cleanup
-
-r.emit('end');
-assert.equal(d.endCalls, 1);
-assert.equal(w.endCalls, 0);
-assert.equal(r.listeners('end').length, 0);
-assert.equal(r.listeners('close').length, 0);
-assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
-assert.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup
-assert.equal(w.listeners('end').length, 0);
-assert.equal(w.listeners('close').length, 1); // B.cleanup
-
-d.emit('end');
-assert.equal(d.endCalls, 1);
-assert.equal(w.endCalls, 1);
-assert.equal(r.listeners('end').length, 0);
-assert.equal(r.listeners('close').length, 0);
-assert.equal(d.listeners('end').length, 0);
-assert.equal(d.listeners('close').length, 0);
-assert.equal(w.listeners('end').length, 0);
-assert.equal(w.listeners('close').length, 0);
+ var d = new Duplex();
+ r.pipe(d); // pipeline A
+ d.pipe(w); // pipeline B
+ assert.equal(r.listeners('end').length, 2); // A.onend, A.cleanup
+ assert.equal(r.listeners('close').length, 2); // A.onclose, A.cleanup
+ assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
+ assert.equal(d.listeners('close').length, 3); // A.cleanup, B.onclose, B.cleanup
+ assert.equal(w.listeners('end').length, 0);
+ assert.equal(w.listeners('close').length, 1); // B.cleanup
+
+ r.emit('end');
+ assert.equal(d.endCalls, 1);
+ assert.equal(w.endCalls, 0);
+ assert.equal(r.listeners('end').length, 0);
+ assert.equal(r.listeners('close').length, 0);
+ assert.equal(d.listeners('end').length, 2); // B.onend, B.cleanup
+ assert.equal(d.listeners('close').length, 2); // B.onclose, B.cleanup
+ assert.equal(w.listeners('end').length, 0);
+ assert.equal(w.listeners('close').length, 1); // B.cleanup
+
+ d.emit('end');
+ assert.equal(d.endCalls, 1);
+ assert.equal(w.endCalls, 1);
+ assert.equal(r.listeners('end').length, 0);
+ assert.equal(r.listeners('close').length, 0);
+ assert.equal(d.listeners('end').length, 0);
+ assert.equal(d.listeners('close').length, 0);
+ assert.equal(w.listeners('end').length, 0);
+ assert.equal(w.listeners('close').length, 0);
+})();
\ No newline at end of file
diff --git a/test/parallel/test-stream-pipe-error-handling.js b/test/parallel/test-stream-pipe-error-handling.js
index 020d8d9e00..94adbd4f63 100644
--- a/test/parallel/test-stream-pipe-error-handling.js
+++ b/test/parallel/test-stream-pipe-error-handling.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var Stream = require('stream').Stream;
@@ -10,7 +11,7 @@ var Stream = require('stream').Stream;
source.pipe(dest);
var gotErr = null;
- source.on('error', function(err) {
+ source.on('error', function (err) {
gotErr = err;
});
@@ -46,15 +47,15 @@ var Stream = require('stream').Stream;
var removed = false;
var didTest = false;
- process.on('exit', function() {
+ process.on('exit', function () {
assert(didTest);
console.log('ok');
});
- r._read = function() {
- setTimeout(function() {
+ r._read = function () {
+ setTimeout(function () {
assert(removed);
- assert.throws(function() {
+ assert.throws(function () {
w.emit('error', new Error('fail'));
});
didTest = true;
@@ -81,13 +82,13 @@ var Stream = require('stream').Stream;
var didTest = false;
var caught = false;
- process.on('exit', function() {
+ process.on('exit', function () {
assert(didTest);
console.log('ok');
});
- r._read = function() {
- setTimeout(function() {
+ r._read = function () {
+ setTimeout(function () {
assert(removed);
w.emit('error', new Error('fail'));
didTest = true;
@@ -95,15 +96,15 @@ var Stream = require('stream').Stream;
};
w.on('error', myOnError);
- w._write = function() {};
+ w._write = function () {};
r.pipe(w);
// Removing some OTHER random listener should not do anything
- w.removeListener('error', function() {});
+ w.removeListener('error', function () {});
removed = true;
function myOnError(er) {
assert(!caught);
caught = true;
}
-})();
+})();
\ No newline at end of file
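
The cases above pin down what happens when 'error' fires on a piped destination with and without a listener: pipe() itself does not forward errors between the two streams, and an 'error' event with no listener is thrown. As a practical aside (a sketch, not part of the patch), callers typically attach a handler to each end of a pipeline:

var stream = require('../../');

var source = new stream.Readable();
source._read = function () {};
var dest = new stream.Writable();
dest._write = function (chunk, encoding, cb) { cb(); };

// errors are not propagated across the pipe, so each stream
// needs its own handler to avoid an uncaught throw
source.on('error', function (err) { console.error('source failed:', err.message); });
dest.on('error', function (err) { console.error('dest failed:', err.message); });

source.pipe(dest);
dest.emit('error', new Error('disk full'));   // handled, does not throw
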
diff --git a/test/parallel/test-stream-pipe-event.js b/test/parallel/test-stream-pipe-event.js
index f6fe959f51..f9f1e2af66 100644
--- a/test/parallel/test-stream-pipe-event.js
+++ b/test/parallel/test-stream-pipe-event.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var stream = require('../../');
var assert = require('assert');
@@ -19,11 +20,11 @@ util.inherits(Readable, require('stream').Stream);
var passed = false;
var w = new Writable();
-w.on('pipe', function(src) {
+w.on('pipe', function (src) {
passed = true;
});
var r = new Readable();
r.pipe(w);
-assert.ok(passed);
+assert.ok(passed);
\ No newline at end of file
diff --git a/test/parallel/test-stream-pipe-without-listenerCount.js b/test/parallel/test-stream-pipe-without-listenerCount.js
index 1a4e431aef..d215378cbd 100644
--- a/test/parallel/test-stream-pipe-without-listenerCount.js
+++ b/test/parallel/test-stream-pipe-without-listenerCount.js
@@ -1,4 +1,5 @@
'use strict';
+
var common = require('../common');
var stream = require('../../');
@@ -8,7 +9,7 @@ r.listenerCount = undefined;
var w = new stream.Stream();
w.listenerCount = undefined;
-w.on('pipe', function() {
+w.on('pipe', function () {
r.emit('error', new Error('Readable Error'));
w.emit('error', new Error('Writable Error'));
});
@@ -16,4 +17,4 @@ r.on('error', common.mustCall(noop));
w.on('error', common.mustCall(noop));
r.pipe(w);
-function noop() {}
+function noop() {}
\ No newline at end of file
diff --git a/test/parallel/test-stream-push-order.js b/test/parallel/test-stream-push-order.js
index 312a55da02..619ee7a113 100644
--- a/test/parallel/test-stream-push-order.js
+++ b/test/parallel/test-stream-push-order.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var Readable = require('../../').Readable;
var assert = require('assert');
@@ -10,7 +11,7 @@ var s = new Readable({
var list = ['1', '2', '3', '4', '5', '6'];
-s._read = function(n) {
+s._read = function (n) {
var one = list.shift();
if (!one) {
s.push(null);
@@ -25,8 +26,7 @@ s.read(0);
// ACTUALLY [1, 3, 5, 6, 4, 2]
-process.on('exit', function() {
- assert.deepEqual(s._readableState.buffer,
- ['1', '2', '3', '4', '5', '6']);
+process.on('exit', function () {
+ assert.deepEqual(s._readableState.buffer, ['1', '2', '3', '4', '5', '6']);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-push-strings.js b/test/parallel/test-stream-push-strings.js
index b8ac3105d2..126da879ea 100644
--- a/test/parallel/test-stream-push-strings.js
+++ b/test/parallel/test-stream-push-strings.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -11,18 +12,18 @@ function MyStream(options) {
this._chunks = 3;
}
-MyStream.prototype._read = function(n) {
+MyStream.prototype._read = function (n) {
switch (this._chunks--) {
case 0:
return this.push(null);
case 1:
- return setTimeout(function() {
+ return setTimeout(function () {
this.push('last chunk');
}.bind(this), 100);
case 2:
return this.push('second to last chunk');
case 3:
- return process.nextTick(function() {
+ return process.nextTick(function () {
this.push('first chunk');
}.bind(this));
default:
@@ -32,15 +33,16 @@ MyStream.prototype._read = function(n) {
var ms = new MyStream();
var results = [];
-ms.on('readable', function() {
+ms.on('readable', function () {
var chunk;
- while (null !== (chunk = ms.read()))
+ while (null !== (chunk = ms.read())) {
results.push(chunk + '');
+ }
});
-var expect = [ 'first chunksecond to last chunk', 'last chunk' ];
-process.on('exit', function() {
+var expect = ['first chunksecond to last chunk', 'last chunk'];
+process.on('exit', function () {
assert.equal(ms._chunks, -1);
assert.deepEqual(results, expect);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-readable-constructor-set-methods.js b/test/parallel/test-stream-readable-constructor-set-methods.js
index 3be55a4ff4..4787a1f0dc 100644
--- a/test/parallel/test-stream-readable-constructor-set-methods.js
+++ b/test/parallel/test-stream-readable-constructor-set-methods.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -13,7 +14,7 @@ function _read(n) {
var r = new Readable({ read: _read });
r.resume();
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(r._read, _read);
assert(_readCalled);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-readable-event.js b/test/parallel/test-stream-readable-event.js
index c0a2a1c525..9f5c111932 100644
--- a/test/parallel/test-stream-readable-event.js
+++ b/test/parallel/test-stream-readable-event.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -12,7 +13,7 @@ var Readable = require('../../').Readable;
});
var _readCalled = false;
- r._read = function(n) {
+ r._read = function (n) {
_readCalled = true;
};
@@ -20,15 +21,15 @@ var Readable = require('../../').Readable;
r.push(new Buffer('blerg'));
var caughtReadable = false;
- setTimeout(function() {
+ setTimeout(function () {
// we're testing what we think we are
assert(!r._readableState.reading);
- r.on('readable', function() {
+ r.on('readable', function () {
caughtReadable = true;
});
});
- process.on('exit', function() {
+ process.on('exit', function () {
// we're testing what we think we are
assert(!_readCalled);
@@ -46,7 +47,7 @@ var Readable = require('../../').Readable;
});
var _readCalled = false;
- r._read = function(n) {
+ r._read = function (n) {
_readCalled = true;
};
@@ -54,15 +55,15 @@ var Readable = require('../../').Readable;
r.push(new Buffer('bl'));
var caughtReadable = false;
- setTimeout(function() {
+ setTimeout(function () {
// assert we're testing what we think we are
assert(r._readableState.reading);
- r.on('readable', function() {
+ r.on('readable', function () {
caughtReadable = true;
});
});
- process.on('exit', function() {
+ process.on('exit', function () {
// we're testing what we think we are
assert(_readCalled);
@@ -79,7 +80,7 @@ var Readable = require('../../').Readable;
});
var _readCalled = false;
- r._read = function(n) {
+ r._read = function (n) {
_readCalled = true;
};
@@ -88,19 +89,19 @@ var Readable = require('../../').Readable;
r.push(null);
var caughtReadable = false;
- setTimeout(function() {
+ setTimeout(function () {
// assert we're testing what we think we are
assert(!r._readableState.reading);
- r.on('readable', function() {
+ r.on('readable', function () {
caughtReadable = true;
});
});
- process.on('exit', function() {
+ process.on('exit', function () {
// we're testing what we think we are
assert(!_readCalled);
assert(caughtReadable);
console.log('ok 3');
});
-})();
+})();
\ No newline at end of file
diff --git a/test/parallel/test-stream-readable-flow-recursion.js b/test/parallel/test-stream-readable-flow-recursion.js
index f95c61d604..b7e1a3ef6d 100644
--- a/test/parallel/test-stream-readable-flow-recursion.js
+++ b/test/parallel/test-stream-readable-flow-recursion.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -15,14 +16,11 @@ process.throwDeprecation = true;
var stream = new Readable({ highWaterMark: 2 });
var reads = 0;
var total = 5000;
-stream._read = function(size) {
+stream._read = function (size) {
reads++;
size = Math.min(size, total);
total -= size;
- if (size === 0)
- stream.push(null);
- else
- stream.push(new Buffer(size));
+ if (size === 0) stream.push(null);else stream.push(new Buffer(size));
};
var depth = 0;
@@ -31,20 +29,17 @@ function flow(stream, size, callback) {
depth += 1;
var chunk = stream.read(size);
- if (!chunk)
- stream.once('readable', flow.bind(null, stream, size, callback));
- else
- callback(chunk);
+ if (!chunk) stream.once('readable', flow.bind(null, stream, size, callback));else callback(chunk);
depth -= 1;
console.log('flow(' + depth + '): exit');
}
-flow(stream, 5000, function() {
+flow(stream, 5000, function () {
console.log('complete (' + depth + ')');
});
-process.on('exit', function(code) {
+process.on('exit', function (code) {
assert.equal(reads, 2);
// we pushed up the high water mark
assert.equal(stream._readableState.highWaterMark, 8192);
@@ -53,4 +48,4 @@ process.on('exit', function(code) {
assert(!code);
assert.equal(depth, 0);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-transform-constructor-set-methods.js b/test/parallel/test-stream-transform-constructor-set-methods.js
index 60cd27b26f..a7fe2c18fc 100644
--- a/test/parallel/test-stream-transform-constructor-set-methods.js
+++ b/test/parallel/test-stream-transform-constructor-set-methods.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -24,9 +25,9 @@ var t = new Transform({
t.end(new Buffer('blerg'));
t.resume();
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(t._transform, _transform);
assert.equal(t._flush, _flush);
assert(_transformCalled);
assert(_flushCalled);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-transform-objectmode-falsey-value.js b/test/parallel/test-stream-transform-objectmode-falsey-value.js
index 42b73d5f65..50cbeca912 100644
--- a/test/parallel/test-stream-transform-objectmode-falsey-value.js
+++ b/test/parallel/test-stream-transform-objectmode-falsey-value.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -9,25 +10,25 @@ var src = new PassThrough({ objectMode: true });
var tx = new PassThrough({ objectMode: true });
var dest = new PassThrough({ objectMode: true });
-var expect = [ -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 ];
+var expect = [-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
var results = [];
-process.on('exit', function() {
+process.on('exit', function () {
assert.deepEqual(results, expect);
console.log('ok');
});
-dest.on('data', function(x) {
+dest.on('data', function (x) {
results.push(x);
});
src.pipe(tx).pipe(dest);
var i = -1;
-var int = setInterval(function() {
+var int = setInterval(function () {
if (i > 10) {
src.end();
clearInterval(int);
} else {
src.write(i++);
}
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-transform-split-objectmode.js b/test/parallel/test-stream-transform-split-objectmode.js
index a74a1ab575..d96824c64b 100644
--- a/test/parallel/test-stream-transform-split-objectmode.js
+++ b/test/parallel/test-stream-transform-split-objectmode.js
@@ -1,52 +1,52 @@
'use strict';
+
require('../common');
var assert = require('assert');
var Transform = require('../../').Transform;
-var parser = new Transform({ readableObjectMode : true });
+var parser = new Transform({ readableObjectMode: true });
assert(parser._readableState.objectMode);
assert(!parser._writableState.objectMode);
assert(parser._readableState.highWaterMark === 16);
-assert(parser._writableState.highWaterMark === (16 * 1024));
+assert(parser._writableState.highWaterMark === 16 * 1024);
-parser._transform = function(chunk, enc, callback) {
- callback(null, { val : chunk[0] });
+parser._transform = function (chunk, enc, callback) {
+ callback(null, { val: chunk[0] });
};
var parsed;
-parser.on('data', function(obj) {
+parser.on('data', function (obj) {
parsed = obj;
});
parser.end(new Buffer([42]));
-process.on('exit', function() {
+process.on('exit', function () {
assert(parsed.val === 42);
});
-
-var serializer = new Transform({ writableObjectMode : true });
+var serializer = new Transform({ writableObjectMode: true });
assert(!serializer._readableState.objectMode);
assert(serializer._writableState.objectMode);
-assert(serializer._readableState.highWaterMark === (16 * 1024));
+assert(serializer._readableState.highWaterMark === 16 * 1024);
assert(serializer._writableState.highWaterMark === 16);
-serializer._transform = function(obj, _, callback) {
+serializer._transform = function (obj, _, callback) {
callback(null, new Buffer([obj.val]));
};
var serialized;
-serializer.on('data', function(chunk) {
+serializer.on('data', function (chunk) {
serialized = chunk;
});
-serializer.write({ val : 42 });
+serializer.write({ val: 42 });
-process.on('exit', function() {
+process.on('exit', function () {
assert(serialized[0] === 42);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-unshift-empty-chunk.js b/test/parallel/test-stream-unshift-empty-chunk.js
index 0675187fd9..a3b610dd85 100644
--- a/test/parallel/test-stream-unshift-empty-chunk.js
+++ b/test/parallel/test-stream-unshift-empty-chunk.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -11,15 +12,15 @@ var nChunks = 10;
var chunk = new Buffer(10);
chunk.fill('x');
-r._read = function(n) {
- setTimeout(function() {
+r._read = function (n) {
+ setTimeout(function () {
r.push(--nChunks === 0 ? null : chunk);
});
};
var readAll = false;
var seen = [];
-r.on('readable', function() {
+r.on('readable', function () {
var chunk;
while (chunk = r.read()) {
seen.push(chunk.toString());
@@ -35,27 +36,9 @@ r.on('readable', function() {
}
});
-var expect =
- [ 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy',
- 'xxxxxxxxxx',
- 'yyyyy' ];
+var expect = ['xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy', 'xxxxxxxxxx', 'yyyyy'];
-r.on('end', function() {
+r.on('end', function () {
assert.deepEqual(seen, expect);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-unshift-read-race.js b/test/parallel/test-stream-unshift-read-race.js
index 8aba18fbf4..fae757cf09 100644
--- a/test/parallel/test-stream-unshift-read-race.js
+++ b/test/parallel/test-stream-unshift-read-race.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -22,7 +23,7 @@ for (var i = 0; i < data.length; i++) {
var pos = 0;
var pushedNull = false;
-r._read = function(n) {
+r._read = function (n) {
assert(!pushedNull, '_read after null push');
// every third chunk is fast
@@ -37,7 +38,7 @@ r._read = function(n) {
r.push(c);
if (c === null) pushError();
} else {
- setTimeout(function() {
+ setTimeout(function () {
pos += n;
r.push(c);
if (c === null) pushError();
@@ -47,40 +48,38 @@ r._read = function(n) {
};
function pushError() {
- assert.throws(function() {
+ assert.throws(function () {
r.push(new Buffer(1));
});
}
-
var w = stream.Writable();
var written = [];
-w._write = function(chunk, encoding, cb) {
+w._write = function (chunk, encoding, cb) {
written.push(chunk.toString());
cb();
};
var ended = false;
-r.on('end', function() {
+r.on('end', function () {
assert(!ended, 'end emitted more than once');
- assert.throws(function() {
+ assert.throws(function () {
r.unshift(new Buffer(1));
});
ended = true;
w.end();
});
-r.on('readable', function() {
+r.on('readable', function () {
var chunk;
while (null !== (chunk = r.read(10))) {
w.write(chunk);
- if (chunk.length > 4)
- r.unshift(new Buffer('1234'));
+ if (chunk.length > 4) r.unshift(new Buffer('1234'));
}
});
var finished = false;
-w.on('finish', function() {
+w.on('finish', function () {
finished = true;
// each chunk should start with 1234, and then be asfdasdfasdf...
// The first got pulled out before the first unshift('1234'), so it's
@@ -95,18 +94,22 @@ w.on('finish', function() {
var c = written[i].charAt(j);
assert.equal(c, asdf);
switch (asdf) {
- case 'a': asdf = 's'; break;
- case 's': asdf = 'd'; break;
- case 'd': asdf = 'f'; break;
- case 'f': asdf = 'a'; break;
+ case 'a':
+ asdf = 's';break;
+ case 's':
+ asdf = 'd';break;
+ case 'd':
+ asdf = 'f';break;
+ case 'f':
+ asdf = 'a';break;
}
}
}
});
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(written.length, 18);
assert(ended, 'stream ended');
assert(finished, 'stream finished');
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-writable-change-default-encoding.js b/test/parallel/test-stream-writable-change-default-encoding.js
index 1f4848b103..de95231682 100644
--- a/test/parallel/test-stream-writable-change-default-encoding.js
+++ b/test/parallel/test-stream-writable-change-default-encoding.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -12,41 +13,40 @@ function MyWritable(fn, options) {
util.inherits(MyWritable, stream.Writable);
-MyWritable.prototype._write = function(chunk, encoding, callback) {
+MyWritable.prototype._write = function (chunk, encoding, callback) {
this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
callback();
};
(function defaultCondingIsUtf8() {
- var m = new MyWritable(function(isBuffer, type, enc) {
+ var m = new MyWritable(function (isBuffer, type, enc) {
assert.equal(enc, 'utf8');
}, { decodeStrings: false });
m.write('foo');
m.end();
-}());
+})();
(function changeDefaultEncodingToAscii() {
- var m = new MyWritable(function(isBuffer, type, enc) {
+ var m = new MyWritable(function (isBuffer, type, enc) {
assert.equal(enc, 'ascii');
}, { decodeStrings: false });
m.setDefaultEncoding('ascii');
m.write('bar');
m.end();
-}());
+})();
assert.throws(function changeDefaultEncodingToInvalidValue() {
- var m = new MyWritable(function(isBuffer, type, enc) {
- }, { decodeStrings: false });
+ var m = new MyWritable(function (isBuffer, type, enc) {}, { decodeStrings: false });
m.setDefaultEncoding({});
m.write('bar');
m.end();
}, TypeError);
(function checkVairableCaseEncoding() {
- var m = new MyWritable(function(isBuffer, type, enc) {
+ var m = new MyWritable(function (isBuffer, type, enc) {
assert.equal(enc, 'ascii');
}, { decodeStrings: false });
m.setDefaultEncoding('AsCii');
m.write('bar');
m.end();
-}());
+})();
\ No newline at end of file
diff --git a/test/parallel/test-stream-writable-constructor-set-methods.js b/test/parallel/test-stream-writable-constructor-set-methods.js
index 56c7b0b14c..21644e198a 100644
--- a/test/parallel/test-stream-writable-constructor-set-methods.js
+++ b/test/parallel/test-stream-writable-constructor-set-methods.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -26,10 +27,10 @@ w2.write(new Buffer('blerg'));
w2.write(new Buffer('blerg'));
w2.end();
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(w._write, _write);
assert(_writeCalled);
assert.equal(w2._writev, _writev);
assert.equal(dLength, 2);
assert(_writevCalled);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream-writable-decoded-encoding.js b/test/parallel/test-stream-writable-decoded-encoding.js
index e443fbd06e..66e4faaeee 100644
--- a/test/parallel/test-stream-writable-decoded-encoding.js
+++ b/test/parallel/test-stream-writable-decoded-encoding.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -12,13 +13,13 @@ function MyWritable(fn, options) {
util.inherits(MyWritable, stream.Writable);
-MyWritable.prototype._write = function(chunk, encoding, callback) {
+MyWritable.prototype._write = function (chunk, encoding, callback) {
this.fn(Buffer.isBuffer(chunk), typeof chunk, encoding);
callback();
};
(function decodeStringsTrue() {
- var m = new MyWritable(function(isBuffer, type, enc) {
+ var m = new MyWritable(function (isBuffer, type, enc) {
assert(isBuffer);
assert.equal(type, 'object');
assert.equal(enc, 'buffer');
@@ -29,7 +30,7 @@ MyWritable.prototype._write = function(chunk, encoding, callback) {
})();
(function decodeStringsFalse() {
- var m = new MyWritable(function(isBuffer, type, enc) {
+ var m = new MyWritable(function (isBuffer, type, enc) {
assert(!isBuffer);
assert.equal(type, 'string');
assert.equal(enc, 'utf8');
@@ -37,4 +38,4 @@ MyWritable.prototype._write = function(chunk, encoding, callback) {
}, { decodeStrings: false });
m.write('some-text', 'utf8');
m.end();
-})();
+})();
\ No newline at end of file
diff --git a/test/parallel/test-stream-writev.js b/test/parallel/test-stream-writev.js
index 0888a6734b..6ca6ab32a6 100644
--- a/test/parallel/test-stream-writev.js
+++ b/test/parallel/test-stream-writev.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -17,10 +18,7 @@ run();
function run() {
var t = queue.pop();
- if (t)
- test(t[0], t[1], t[2], run);
- else
- console.log('ok');
+ if (t) test(t[0], t[1], t[2], run);else console.log('ok');
}
function test(decode, uncork, multi, next) {
@@ -30,46 +28,31 @@ function test(decode, uncork, multi, next) {
function cnt(msg) {
expectCount++;
var expect = expectCount;
- return function(er) {
- if (er)
- throw er;
+ return function (er) {
+ if (er) throw er;
counter++;
assert.equal(counter, expect);
};
}
var w = new stream.Writable({ decodeStrings: decode });
- w._write = function(chunk, e, cb) {
+ w._write = function (chunk, e, cb) {
assert(false, 'Should not call _write');
};
- var expectChunks = decode ?
- [
- { encoding: 'buffer',
- chunk: [104, 101, 108, 108, 111, 44, 32] },
- { encoding: 'buffer',
- chunk: [119, 111, 114, 108, 100] },
- { encoding: 'buffer',
- chunk: [33] },
- { encoding: 'buffer',
- chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] },
- { encoding: 'buffer',
- chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173]}
- ] : [
- { encoding: 'ascii', chunk: 'hello, ' },
- { encoding: 'utf8', chunk: 'world' },
- { encoding: 'buffer', chunk: [33] },
- { encoding: 'binary', chunk: '\nand then...' },
- { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }
- ];
+ var expectChunks = decode ? [{ encoding: 'buffer',
+ chunk: [104, 101, 108, 108, 111, 44, 32] }, { encoding: 'buffer',
+ chunk: [119, 111, 114, 108, 100] }, { encoding: 'buffer',
+ chunk: [33] }, { encoding: 'buffer',
+ chunk: [10, 97, 110, 100, 32, 116, 104, 101, 110, 46, 46, 46] }, { encoding: 'buffer',
+ chunk: [250, 206, 190, 167, 222, 173, 190, 239, 222, 202, 251, 173] }] : [{ encoding: 'ascii', chunk: 'hello, ' }, { encoding: 'utf8', chunk: 'world' }, { encoding: 'buffer', chunk: [33] }, { encoding: 'binary', chunk: '\nand then...' }, { encoding: 'hex', chunk: 'facebea7deadbeefdecafbad' }];
var actualChunks;
- w._writev = function(chunks, cb) {
- actualChunks = chunks.map(function(chunk) {
+ w._writev = function (chunks, cb) {
+ actualChunks = chunks.map(function (chunk) {
return {
encoding: chunk.encoding,
- chunk: Buffer.isBuffer(chunk.chunk) ?
- Array.prototype.slice.call(chunk.chunk) : chunk.chunk
+ chunk: Buffer.isBuffer(chunk.chunk) ? Array.prototype.slice.call(chunk.chunk) : chunk.chunk
};
});
cb();
@@ -79,26 +62,23 @@ function test(decode, uncork, multi, next) {
w.write('hello, ', 'ascii', cnt('hello'));
w.write('world', 'utf8', cnt('world'));
- if (multi)
- w.cork();
+ if (multi) w.cork();
w.write(new Buffer('!'), 'buffer', cnt('!'));
w.write('\nand then...', 'binary', cnt('and then'));
- if (multi)
- w.uncork();
+ if (multi) w.uncork();
w.write('facebea7deadbeefdecafbad', 'hex', cnt('hex'));
- if (uncork)
- w.uncork();
+ if (uncork) w.uncork();
w.end(cnt('end'));
- w.on('finish', function() {
+ w.on('finish', function () {
// make sure finish comes after all the write cb
cnt('finish')();
assert.deepEqual(expectChunks, actualChunks);
next();
});
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream2-base64-single-char-read-end.js b/test/parallel/test-stream2-base64-single-char-read-end.js
index db9adc5fd1..ad39c1395d 100644
--- a/test/parallel/test-stream2-base64-single-char-read-end.js
+++ b/test/parallel/test-stream2-base64-single-char-read-end.js
@@ -1,37 +1,38 @@
'use strict';
+
require('../common');
var R = require('../../lib/_stream_readable');
var W = require('../../lib/_stream_writable');
var assert = require('assert');
-var src = new R({encoding: 'base64'});
+var src = new R({ encoding: 'base64' });
var dst = new W();
var hasRead = false;
var accum = [];
var timeout;
-src._read = function(n) {
+src._read = function (n) {
if (!hasRead) {
hasRead = true;
- process.nextTick(function() {
+ process.nextTick(function () {
src.push(new Buffer('1'));
src.push(null);
});
}
};
-dst._write = function(chunk, enc, cb) {
+dst._write = function (chunk, enc, cb) {
accum.push(chunk);
cb();
};
-src.on('end', function() {
+src.on('end', function () {
assert.equal(Buffer.concat(accum) + '', 'MQ==');
clearTimeout(timeout);
});
src.pipe(dst);
-timeout = setTimeout(function() {
+timeout = setTimeout(function () {
assert.fail(null, null, 'timed out waiting for _write');
-}, 100);
+}, 100);
\ No newline at end of file
diff --git a/test/parallel/test-stream2-compatibility.js b/test/parallel/test-stream2-compatibility.js
index f190ddd762..06c4f3be21 100644
--- a/test/parallel/test-stream2-compatibility.js
+++ b/test/parallel/test-stream2-compatibility.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var R = require('../../lib/_stream_readable');
var W = require('../../lib/_stream_writable');
@@ -13,20 +14,20 @@ function TestReader() {
this._buffer = new Buffer(100);
this._buffer.fill('x');
- this.on('data', function() {
+ this.on('data', function () {
ondataCalled++;
});
}
util.inherits(TestReader, R);
-TestReader.prototype._read = function(n) {
+TestReader.prototype._read = function (n) {
this.push(this._buffer);
this._buffer = new Buffer(0);
};
var reader = new TestReader();
-setImmediate(function() {
+setImmediate(function () {
assert.equal(ondataCalled, 1);
console.log('ok');
reader.push(null);
@@ -40,14 +41,14 @@ function TestWriter() {
util.inherits(TestWriter, W);
-TestWriter.prototype._write = function(chunk, enc, cb) {
+TestWriter.prototype._write = function (chunk, enc, cb) {
cb();
};
var writer = new TestWriter();
-process.on('exit', function() {
+process.on('exit', function () {
assert.strictEqual(reader.readable, false);
assert.strictEqual(writer.writable, false);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-decode-partial.js b/test/parallel/test-stream2-decode-partial.js
new file mode 100644
index 0000000000..6e4b24272c
--- /dev/null
+++ b/test/parallel/test-stream2-decode-partial.js
@@ -0,0 +1,24 @@
+'use strict';
+
+require('../common');
+var Readable = require('../../lib/_stream_readable');
+var assert = require('assert');
+
+var buf = '';
+var euro = new Buffer([0xE2, 0x82, 0xAC]);
+var cent = new Buffer([0xC2, 0xA2]);
+var source = Buffer.concat([euro, cent]);
+
+var readable = Readable({ encoding: 'utf8' });
+readable.push(source.slice(0, 2));
+readable.push(source.slice(2, 4));
+readable.push(source.slice(4, source.length));
+readable.push(null);
+
+readable.on('data', function (data) {
+ buf += data;
+});
+
+process.on('exit', function () {
+ assert.strictEqual(buf, '€¢');
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-finish-pipe.js b/test/parallel/test-stream2-finish-pipe.js
index aa6a79dc02..39c91392f8 100644
--- a/test/parallel/test-stream2-finish-pipe.js
+++ b/test/parallel/test-stream2-finish-pipe.js
@@ -1,15 +1,16 @@
'use strict';
+
require('../common');
var stream = require('../../');
var Buffer = require('buffer').Buffer;
var r = new stream.Readable();
-r._read = function(size) {
+r._read = function (size) {
r.push(new Buffer(size));
};
var w = new stream.Writable();
-w._write = function(data, encoding, cb) {
+w._write = function (data, encoding, cb) {
cb(null);
};
@@ -18,4 +19,4 @@ r.pipe(w);
// This might sound unrealistic, but it happens in net.js. When
// `socket.allowHalfOpen === false`, EOF will cause `.destroySoon()` call which
// ends the writable side of net.Socket.
-w.end();
+w.end();
\ No newline at end of file
diff --git a/test/parallel/test-stream2-large-read-stall.js b/test/parallel/test-stream2-large-read-stall.js
index 6c9b23442a..7485f52517 100644
--- a/test/parallel/test-stream2-large-read-stall.js
+++ b/test/parallel/test-stream2-large-read-stall.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -18,7 +19,7 @@ var rs = r._readableState;
r._read = push;
-r.on('readable', function() {
+r.on('readable', function () {
;false && console.error('>> readable');
do {
;false && console.error(' > read(%d)', READSIZE);
@@ -26,22 +27,18 @@ r.on('readable', function() {
;false && console.error(' < %j (%d remain)', ret && ret.length, rs.length);
} while (ret && ret.length === READSIZE);
- ;false && console.error('<< after read()',
- ret && ret.length,
- rs.needReadable,
- rs.length);
+ ;false && console.error('<< after read()', ret && ret.length, rs.needReadable, rs.length);
});
var endEmitted = false;
-r.on('end', function() {
+r.on('end', function () {
endEmitted = true;
;false && console.error('end');
});
var pushes = 0;
function push() {
- if (pushes > PUSHCOUNT)
- return;
+ if (pushes > PUSHCOUNT) return;
if (pushes++ === PUSHCOUNT) {
;false && console.error(' push(EOF)');
@@ -49,11 +46,10 @@ function push() {
}
;false && console.error(' push #%d', pushes);
- if (r.push(new Buffer(PUSHSIZE)))
- setTimeout(push);
+ if (r.push(new Buffer(PUSHSIZE))) setTimeout(push);
}
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(pushes, PUSHCOUNT + 1);
assert(endEmitted);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-objects.js b/test/parallel/test-stream2-objects.js
index f80ce7fc16..84222b542d 100644
--- a/test/parallel/test-stream2-objects.js
+++ b/test/parallel/test-stream2-objects.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var Readable = require('../../lib/_stream_readable');
var Writable = require('../../lib/_stream_writable');
@@ -15,8 +16,7 @@ function test(name, fn) {
function run() {
var next = tests.shift();
- if (!next)
- return console.error('ok');
+ if (!next) return console.error('ok');
var name = next[0];
var fn = next[1];
@@ -24,7 +24,7 @@ function run() {
fn({
same: assert.deepEqual,
equal: assert.equal,
- end: function() {
+ end: function () {
count--;
run();
}
@@ -32,7 +32,7 @@ function run() {
}
// ensure all tests have run
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(count, 0);
});
@@ -41,11 +41,11 @@ process.nextTick(run);
function toArray(callback) {
var stream = new Writable({ objectMode: true });
var list = [];
- stream.write = function(chunk) {
+ stream.write = function (chunk) {
list.push(chunk);
};
- stream.end = function() {
+ stream.end = function () {
callback(list);
};
@@ -55,7 +55,7 @@ function toArray(callback) {
function fromArray(list) {
var r = new Readable({ objectMode: true });
r._read = noop;
- forEach(list, function(chunk) {
+ forEach(list, function (chunk) {
r.push(chunk);
});
r.push(null);
@@ -65,8 +65,8 @@ function fromArray(list) {
function noop() {}
-test('can read objects from stream', function(t) {
- var r = fromArray([{ one: '1'}, { two: '2' }]);
+test('can read objects from stream', function (t) {
+ var r = fromArray([{ one: '1' }, { two: '2' }]);
var v1 = r.read();
var v2 = r.read();
@@ -79,21 +79,18 @@ test('can read objects from stream', function(t) {
t.end();
});
-test('can pipe objects into stream', function(t) {
- var r = fromArray([{ one: '1'}, { two: '2' }]);
+test('can pipe objects into stream', function (t) {
+ var r = fromArray([{ one: '1' }, { two: '2' }]);
- r.pipe(toArray(function(list) {
- assert.deepEqual(list, [
- { one: '1' },
- { two: '2' }
- ]);
+ r.pipe(toArray(function (list) {
+ assert.deepEqual(list, [{ one: '1' }, { two: '2' }]);
t.end();
}));
});
-test('read(n) is ignored', function(t) {
- var r = fromArray([{ one: '1'}, { two: '2' }]);
+test('read(n) is ignored', function (t) {
+ var r = fromArray([{ one: '1' }, { two: '2' }]);
var value = r.read(2);
@@ -102,63 +99,57 @@ test('read(n) is ignored', function(t) {
t.end();
});
-test('can read objects from _read (sync)', function(t) {
+test('can read objects from _read (sync)', function (t) {
var r = new Readable({ objectMode: true });
- var list = [{ one: '1'}, { two: '2' }];
- r._read = function(n) {
+ var list = [{ one: '1' }, { two: '2' }];
+ r._read = function (n) {
var item = list.shift();
r.push(item || null);
};
- r.pipe(toArray(function(list) {
- assert.deepEqual(list, [
- { one: '1' },
- { two: '2' }
- ]);
+ r.pipe(toArray(function (list) {
+ assert.deepEqual(list, [{ one: '1' }, { two: '2' }]);
t.end();
}));
});
-test('can read objects from _read (async)', function(t) {
+test('can read objects from _read (async)', function (t) {
var r = new Readable({ objectMode: true });
- var list = [{ one: '1'}, { two: '2' }];
- r._read = function(n) {
+ var list = [{ one: '1' }, { two: '2' }];
+ r._read = function (n) {
var item = list.shift();
- process.nextTick(function() {
+ process.nextTick(function () {
r.push(item || null);
});
};
- r.pipe(toArray(function(list) {
- assert.deepEqual(list, [
- { one: '1' },
- { two: '2' }
- ]);
+ r.pipe(toArray(function (list) {
+ assert.deepEqual(list, [{ one: '1' }, { two: '2' }]);
t.end();
}));
});
-test('can read strings as objects', function(t) {
+test('can read strings as objects', function (t) {
var r = new Readable({
objectMode: true
});
r._read = noop;
var list = ['one', 'two', 'three'];
- forEach(list, function(str) {
+ forEach(list, function (str) {
r.push(str);
});
r.push(null);
- r.pipe(toArray(function(array) {
+ r.pipe(toArray(function (array) {
assert.deepEqual(array, list);
t.end();
}));
});
-test('read(0) for object streams', function(t) {
+test('read(0) for object streams', function (t) {
var r = new Readable({
objectMode: true
});
@@ -167,14 +158,14 @@ test('read(0) for object streams', function(t) {
r.push('foobar');
r.push(null);
- r.pipe(toArray(function(array) {
+ r.pipe(toArray(function (array) {
assert.deepEqual(array, ['foobar']);
t.end();
}));
});
-test('falsey values', function(t) {
+test('falsey values', function (t) {
var r = new Readable({
objectMode: true
});
@@ -185,14 +176,14 @@ test('falsey values', function(t) {
r.push('');
r.push(null);
- r.pipe(toArray(function(array) {
+ r.pipe(toArray(function (array) {
assert.deepEqual(array, [false, 0, '']);
t.end();
}));
});
-test('high watermark _read', function(t) {
+test('high watermark _read', function (t) {
var r = new Readable({
highWaterMark: 6,
objectMode: true
@@ -200,11 +191,11 @@ test('high watermark _read', function(t) {
var calls = 0;
var list = ['1', '2', '3', '4', '5', '6', '7', '8'];
- r._read = function(n) {
+ r._read = function (n) {
calls++;
};
- forEach(list, function(c) {
+ forEach(list, function (c) {
r.push(c);
});
@@ -224,12 +215,12 @@ test('high watermark _read', function(t) {
t.end();
});
-test('high watermark push', function(t) {
+test('high watermark push', function (t) {
var r = new Readable({
highWaterMark: 6,
objectMode: true
});
- r._read = function(n) {};
+ r._read = function (n) {};
for (var i = 0; i < 6; i++) {
var bool = r.push(i);
assert.equal(bool, i === 5 ? false : true);
@@ -238,15 +229,15 @@ test('high watermark push', function(t) {
t.end();
});
-test('can write objects to stream', function(t) {
+test('can write objects to stream', function (t) {
var w = new Writable({ objectMode: true });
- w._write = function(chunk, encoding, cb) {
+ w._write = function (chunk, encoding, cb) {
assert.deepEqual(chunk, { foo: 'bar' });
cb();
};
- w.on('finish', function() {
+ w.on('finish', function () {
t.end();
});
@@ -254,16 +245,16 @@ test('can write objects to stream', function(t) {
w.end();
});
-test('can write multiple objects to stream', function(t) {
+test('can write multiple objects to stream', function (t) {
var w = new Writable({ objectMode: true });
var list = [];
- w._write = function(chunk, encoding, cb) {
+ w._write = function (chunk, encoding, cb) {
list.push(chunk);
cb();
};
- w.on('finish', function() {
+ w.on('finish', function () {
assert.deepEqual(list, [0, 1, 2, 3, 4]);
t.end();
@@ -277,18 +268,18 @@ test('can write multiple objects to stream', function(t) {
w.end();
});
-test('can write strings as objects', function(t) {
+test('can write strings as objects', function (t) {
var w = new Writable({
objectMode: true
});
var list = [];
- w._write = function(chunk, encoding, cb) {
+ w._write = function (chunk, encoding, cb) {
list.push(chunk);
process.nextTick(cb);
};
- w.on('finish', function() {
+ w.on('finish', function () {
assert.deepEqual(list, ['0', '1', '2', '3', '4']);
t.end();
@@ -302,22 +293,22 @@ test('can write strings as objects', function(t) {
w.end();
});
-test('buffers finish until cb is called', function(t) {
+test('buffers finish until cb is called', function (t) {
var w = new Writable({
objectMode: true
});
var called = false;
- w._write = function(chunk, encoding, cb) {
+ w._write = function (chunk, encoding, cb) {
assert.equal(chunk, 'foo');
- process.nextTick(function() {
+ process.nextTick(function () {
called = true;
cb();
});
};
- w.on('finish', function() {
+ w.on('finish', function () {
assert.equal(called, true);
t.end();
@@ -327,8 +318,8 @@ test('buffers finish until cb is called', function(t) {
w.end();
});
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream2-pipe-error-handling.js b/test/parallel/test-stream2-pipe-error-handling.js
index b7ae248371..81dd273ffc 100644
--- a/test/parallel/test-stream2-pipe-error-handling.js
+++ b/test/parallel/test-stream2-pipe-error-handling.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var stream = require('../../');
@@ -7,32 +8,32 @@ var stream = require('../../');
var count = 1000;
var source = new stream.Readable();
- source._read = function(n) {
+ source._read = function (n) {
n = Math.min(count, n);
count -= n;
source.push(new Buffer(n));
};
var unpipedDest;
- source.unpipe = function(dest) {
+ source.unpipe = function (dest) {
unpipedDest = dest;
stream.Readable.prototype.unpipe.call(this, dest);
};
var dest = new stream.Writable();
- dest._write = function(chunk, encoding, cb) {
+ dest._write = function (chunk, encoding, cb) {
cb();
};
source.pipe(dest);
var gotErr = null;
- dest.on('error', function(err) {
+ dest.on('error', function (err) {
gotErr = err;
});
var unpipedSource;
- dest.on('unpipe', function(src) {
+ dest.on('unpipe', function (src) {
unpipedSource = src;
});
@@ -47,27 +48,27 @@ var stream = require('../../');
var count = 1000;
var source = new stream.Readable();
- source._read = function(n) {
+ source._read = function (n) {
n = Math.min(count, n);
count -= n;
source.push(new Buffer(n));
};
var unpipedDest;
- source.unpipe = function(dest) {
+ source.unpipe = function (dest) {
unpipedDest = dest;
stream.Readable.prototype.unpipe.call(this, dest);
};
var dest = new stream.Writable();
- dest._write = function(chunk, encoding, cb) {
+ dest._write = function (chunk, encoding, cb) {
cb();
};
source.pipe(dest);
var unpipedSource;
- dest.on('unpipe', function(src) {
+ dest.on('unpipe', function (src) {
unpipedSource = src;
});
@@ -82,4 +83,4 @@ var stream = require('../../');
assert.strictEqual(gotErr, err);
assert.strictEqual(unpipedSource, source);
assert.strictEqual(unpipedDest, dest);
-})();
+})();
\ No newline at end of file
diff --git a/test/parallel/test-stream2-pipe-error-once-listener.js b/test/parallel/test-stream2-pipe-error-once-listener.js
index ac89541fd8..cab6aefceb 100644
--- a/test/parallel/test-stream2-pipe-error-once-listener.js
+++ b/test/parallel/test-stream2-pipe-error-once-listener.js
@@ -1,27 +1,26 @@
'use strict';
+
require('../common');
var util = require('util');
var stream = require('../../');
-
-var Read = function() {
+var Read = function () {
stream.Readable.call(this);
};
util.inherits(Read, stream.Readable);
-Read.prototype._read = function(size) {
+Read.prototype._read = function (size) {
this.push('x');
this.push(null);
};
-
-var Write = function() {
+var Write = function () {
stream.Writable.call(this);
};
util.inherits(Write, stream.Writable);
-Write.prototype._write = function(buffer, encoding, cb) {
+Write.prototype._write = function (buffer, encoding, cb) {
this.emit('error', new Error('boom'));
this.emit('alldone');
};
@@ -29,14 +28,13 @@ Write.prototype._write = function(buffer, encoding, cb) {
var read = new Read();
var write = new Write();
-write.once('error', function(err) {});
-write.once('alldone', function(err) {
+write.once('error', function (err) {});
+write.once('alldone', function (err) {
console.log('ok');
});
-process.on('exit', function(c) {
+process.on('exit', function (c) {
console.error('error thrown even with listener');
});
-read.pipe(write);
-
+read.pipe(write);
\ No newline at end of file
diff --git a/test/parallel/test-stream2-push.js b/test/parallel/test-stream2-push.js
index 695a1add74..7cc368aac0 100644
--- a/test/parallel/test-stream2-push.js
+++ b/test/parallel/test-stream2-push.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var stream = require('../../');
var Readable = stream.Readable;
@@ -7,7 +8,6 @@ var assert = require('assert');
var EE = require('events').EventEmitter;
-
// a mock thing a bit like the net.Socket/tcp_wrap.handle interaction
stream = new Readable({
@@ -17,24 +17,23 @@ stream = new Readable({
var source = new EE();
-stream._read = function() {
+stream._read = function () {
console.error('stream._read');
readStart();
};
var ended = false;
-stream.on('end', function() {
+stream.on('end', function () {
ended = true;
});
-source.on('data', function(chunk) {
+source.on('data', function (chunk) {
var ret = stream.push(chunk);
console.error('data', stream._readableState.length);
- if (!ret)
- readStop();
+ if (!ret) readStop();
});
-source.on('end', function() {
+source.on('end', function () {
stream.push(null);
});
@@ -48,10 +47,9 @@ function readStart() {
function readStop() {
console.error('readStop');
reading = false;
- process.nextTick(function() {
+ process.nextTick(function () {
var r = stream.read();
- if (r !== null)
- writer.write(r);
+ if (r !== null) writer.write(r);
});
}
@@ -61,15 +59,9 @@ var writer = new Writable({
var written = [];
-var expectWritten =
- [ 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg',
- 'asdfgasdfgasdfgasdfg' ];
+var expectWritten = ['asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg', 'asdfgasdfgasdfgasdfg'];
-writer._write = function(chunk, encoding, cb) {
+writer._write = function (chunk, encoding, cb) {
console.error('WRITE %s', chunk);
written.push(chunk);
process.nextTick(cb);
@@ -77,7 +69,6 @@ writer._write = function(chunk, encoding, cb) {
writer.on('finish', finish);
-
// now emit some chunks.
var chunk = 'asdfg';
@@ -95,10 +86,7 @@ function data() {
assert(reading);
source.emit('data', chunk);
assert(!reading);
- if (set++ < 5)
- setTimeout(data, 10);
- else
- end();
+ if (set++ < 5) setTimeout(data, 10);else end();
}
function finish() {
@@ -111,7 +99,7 @@ function end() {
source.emit('end');
assert(!reading);
writer.end(stream.read());
- setTimeout(function() {
+ setTimeout(function () {
assert(ended);
});
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream2-read-sync-stack.js b/test/parallel/test-stream2-read-sync-stack.js
index 44d1d90ae0..a6a94b1f00 100644
--- a/test/parallel/test-stream2-read-sync-stack.js
+++ b/test/parallel/test-stream2-read-sync-stack.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var Readable = require('../../').Readable;
@@ -10,14 +11,13 @@ var N = 256 * 1024;
process.maxTickDepth = N + 2;
var reads = 0;
-r._read = function(n) {
+r._read = function (n) {
var chunk = reads++ === N ? null : new Buffer(1);
r.push(chunk);
};
r.on('readable', function onReadable() {
- if (!(r._readableState.length % 256))
- console.error('readable', r._readableState.length);
+ if (!(r._readableState.length % 256)) console.error('readable', r._readableState.length);
r.read(N * 2);
});
@@ -28,7 +28,7 @@ r.on('end', function onEnd() {
r.read(0);
-process.on('exit', function() {
+process.on('exit', function () {
assert(ended);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
index cb57f2fa90..42f46da1eb 100644
--- a/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
+++ b/test/parallel/test-stream2-readable-empty-buffer-no-eof.js
@@ -1,4 +1,5 @@
'use strict';
+
var common = require('../common');
var assert = require('assert');
@@ -24,7 +25,7 @@ function test1() {
buf.fill('x');
var reads = 5;
var timeout = common.platformTimeout(50);
- r._read = function(n) {
+ r._read = function (n) {
switch (reads--) {
case 0:
return r.push(null); // EOF
@@ -35,16 +36,16 @@ function test1() {
return r.push(new Buffer(0)); // Not-EOF!
case 3:
setTimeout(r.read.bind(r, 0), timeout);
- return process.nextTick(function() {
+ return process.nextTick(function () {
return r.push(new Buffer(0));
});
case 4:
setTimeout(r.read.bind(r, 0), timeout);
- return setTimeout(function() {
+ return setTimeout(function () {
return r.push(new Buffer(0));
});
case 5:
- return setTimeout(function() {
+ return setTimeout(function () {
return r.push(buf);
});
default:
@@ -54,18 +55,19 @@ function test1() {
var results = [];
function flow() {
- var chunk;
- while (null !== (chunk = r.read()))
+ var chunk = undefined;
+ while (null !== (chunk = r.read())) {
results.push(chunk + '');
+ }
}
r.on('readable', flow);
- r.on('end', function() {
+ r.on('end', function () {
results.push('EOF');
});
flow();
- process.on('exit', function() {
- assert.deepEqual(results, [ 'xxxxx', 'xxxxx', 'EOF' ]);
+ process.on('exit', function () {
+ assert.deepEqual(results, ['xxxxx', 'xxxxx', 'EOF']);
console.log('ok');
});
}
@@ -73,27 +75,26 @@ function test1() {
function test2() {
var r = new Readable({ encoding: 'base64' });
var reads = 5;
- r._read = function(n) {
- if (!reads--)
- return r.push(null); // EOF
- else
- return r.push(new Buffer('x'));
+ r._read = function (n) {
+ if (! reads--) return r.push(null); // EOF
+ else return r.push(new Buffer('x'));
};
var results = [];
function flow() {
var chunk;
- while (null !== (chunk = r.read()))
+ while (null !== (chunk = r.read())) {
results.push(chunk + '');
+ }
}
r.on('readable', flow);
- r.on('end', function() {
+ r.on('end', function () {
results.push('EOF');
});
flow();
- process.on('exit', function() {
- assert.deepEqual(results, [ 'eHh4', 'eHg=', 'EOF' ]);
+ process.on('exit', function () {
+ assert.deepEqual(results, ['eHh4', 'eHg=', 'EOF']);
console.log('ok');
});
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream2-readable-from-list.js b/test/parallel/test-stream2-readable-from-list.js
index 3240bb9bd4..6bc8b0ef1d 100644
--- a/test/parallel/test-stream2-readable-from-list.js
+++ b/test/parallel/test-stream2-readable-from-list.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var fromList = require('../../lib/_stream_readable')._fromList;
@@ -14,8 +15,7 @@ function test(name, fn) {
function run() {
var next = tests.shift();
- if (!next)
- return console.error('ok');
+ if (!next) return console.error('ok');
var name = next[0];
var fn = next[1];
@@ -23,7 +23,7 @@ function run() {
fn({
same: assert.deepEqual,
equal: assert.equal,
- end: function() {
+ end: function () {
count--;
run();
}
@@ -31,18 +31,14 @@ function run() {
}
// ensure all tests have run
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(count, 0);
});
process.nextTick(run);
-
-test('buffers', function(t) {
- var list = [ new Buffer('foog'),
- new Buffer('bark'),
- new Buffer('bazy'),
- new Buffer('kuel') ];
+test('buffers', function (t) {
+ var list = [new Buffer('foog'), new Buffer('bark'), new Buffer('bazy'), new Buffer('kuel')];
// read more than the first element.
var ret = fromList(6, { buffer: list, length: 16 });
@@ -66,11 +62,8 @@ test('buffers', function(t) {
t.end();
});
-test('strings', function(t) {
- var list = [ 'foog',
- 'bark',
- 'bazy',
- 'kuel' ];
+test('strings', function (t) {
+ var list = ['foog', 'bark', 'bazy', 'kuel'];
// read more than the first element.
var ret = fromList(6, { buffer: list, length: 16, decoder: true });
@@ -92,4 +85,4 @@ test('strings', function(t) {
t.same(list, []);
t.end();
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-readable-legacy-drain.js b/test/parallel/test-stream2-readable-legacy-drain.js
index 9962d893aa..46010c7d00 100644
--- a/test/parallel/test-stream2-readable-legacy-drain.js
+++ b/test/parallel/test-stream2-readable-legacy-drain.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -8,12 +9,12 @@ var Readable = require('../../').Readable;
var r = new Readable();
var N = 256;
var reads = 0;
-r._read = function(n) {
+r._read = function (n) {
return r.push(++reads === N ? null : new Buffer(1));
};
var rended = false;
-r.on('end', function() {
+r.on('end', function () {
rended = true;
});
@@ -21,7 +22,7 @@ var w = new Stream();
w.writable = true;
var writes = 0;
var buffered = 0;
-w.write = function(c) {
+w.write = function (c) {
writes += c.length;
buffered += c.length;
process.nextTick(drain);
@@ -34,22 +35,21 @@ function drain() {
w.emit('drain');
}
-
var wended = false;
-w.end = function() {
+w.end = function () {
wended = true;
};
// Just for kicks, let's mess with the drain count.
// This verifies that even if it gets negative in the
// pipe() cleanup function, we'll still function properly.
-r.on('readable', function() {
+r.on('readable', function () {
w.emit('drain');
});
r.pipe(w);
-process.on('exit', function() {
+process.on('exit', function () {
assert(rended);
assert(wended);
console.error('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-readable-non-empty-end.js b/test/parallel/test-stream2-readable-non-empty-end.js
index eebbfb34ef..466c8f6605 100644
--- a/test/parallel/test-stream2-readable-non-empty-end.js
+++ b/test/parallel/test-stream2-readable-non-empty-end.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var Readable = require('../../lib/_stream_readable');
@@ -12,9 +13,9 @@ for (var i = 1; i <= 10; i++) {
var test = new Readable();
var n = 0;
-test._read = function(size) {
+test._read = function (size) {
var chunk = chunks[n++];
- setTimeout(function() {
+ setTimeout(function () {
test.push(chunk === undefined ? null : chunk);
});
};
@@ -25,7 +26,7 @@ function thrower() {
}
var bytesread = 0;
-test.on('readable', function() {
+test.on('readable', function () {
var b = len - bytesread - 1;
var res = test.read(b);
if (res) {
@@ -42,10 +43,10 @@ function next() {
test.removeListener('end', thrower);
var endEmitted = false;
- process.on('exit', function() {
+ process.on('exit', function () {
assert(endEmitted, 'end should be emitted by now');
});
- test.on('end', function() {
+ test.on('end', function () {
endEmitted = true;
});
@@ -55,4 +56,4 @@ function next() {
assert.equal(r.length, 1);
r = test.read();
assert.equal(r, null);
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream2-readable-wrap-empty.js b/test/parallel/test-stream2-readable-wrap-empty.js
index a963eba7ac..c2b2071282 100644
--- a/test/parallel/test-stream2-readable-wrap-empty.js
+++ b/test/parallel/test-stream2-readable-wrap-empty.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -6,18 +7,18 @@ var Readable = require('../../lib/_stream_readable');
var EE = require('events').EventEmitter;
var oldStream = new EE();
-oldStream.pause = function() {};
-oldStream.resume = function() {};
+oldStream.pause = function () {};
+oldStream.resume = function () {};
var newStream = new Readable().wrap(oldStream);
var ended = false;
-newStream
- .on('readable', function() {})
- .on('end', function() { ended = true; });
+newStream.on('readable', function () {}).on('end', function () {
+ ended = true;
+});
oldStream.emit('end');
-process.on('exit', function() {
+process.on('exit', function () {
assert.ok(ended);
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-set-encoding.js b/test/parallel/test-stream2-set-encoding.js
index dc0bbb7be6..198eb91386 100644
--- a/test/parallel/test-stream2-set-encoding.js
+++ b/test/parallel/test-stream2-set-encoding.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var R = require('../../lib/_stream_readable');
@@ -15,8 +16,7 @@ function test(name, fn) {
function run() {
var next = tests.shift();
- if (!next)
- return console.error('ok');
+ if (!next) return console.error('ok');
var name = next[0];
var fn = next[1];
@@ -24,7 +24,7 @@ function run() {
fn({
same: assert.deepEqual,
equal: assert.equal,
- end: function() {
+ end: function () {
count--;
run();
}
@@ -32,7 +32,7 @@ function run() {
}
// ensure all tests have run
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(count, 0);
});
@@ -49,8 +49,8 @@ function TestReader(n, opts) {
this.len = n || 100;
}
-TestReader.prototype._read = function(n) {
- setTimeout(function() {
+TestReader.prototype._read = function (n) {
+ setTimeout(function () {
if (this.pos >= this.len) {
// double push(null) to test eos handling
@@ -75,272 +75,158 @@ TestReader.prototype._read = function(n) {
}.bind(this), 1);
};
-test('setEncoding utf8', function(t) {
+test('setEncoding utf8', function (t) {
var tr = new TestReader(100);
tr.setEncoding('utf8');
var out = [];
- var expect =
- [ 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa' ];
+ var expect = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa'];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(10)))
+ while (null !== (chunk = tr.read(10))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-
-test('setEncoding hex', function(t) {
+test('setEncoding hex', function (t) {
var tr = new TestReader(100);
tr.setEncoding('hex');
var out = [];
- var expect =
- [ '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161' ];
+ var expect = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161'];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(10)))
+ while (null !== (chunk = tr.read(10))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-test('setEncoding hex with read(13)', function(t) {
+test('setEncoding hex with read(13)', function (t) {
var tr = new TestReader(100);
tr.setEncoding('hex');
var out = [];
- var expect =
- [ '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '16161' ];
+ var expect = ['6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161'];
tr.on('readable', function flow() {
console.log('readable once');
var chunk;
- while (null !== (chunk = tr.read(13)))
+ while (null !== (chunk = tr.read(13))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
console.log('END');
t.same(out, expect);
t.end();
});
});
-test('setEncoding base64', function(t) {
+test('setEncoding base64', function (t) {
var tr = new TestReader(100);
tr.setEncoding('base64');
var out = [];
- var expect =
- [ 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYQ==' ];
+ var expect = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ=='];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(10)))
+ while (null !== (chunk = tr.read(10))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-test('encoding: utf8', function(t) {
+test('encoding: utf8', function (t) {
var tr = new TestReader(100, { encoding: 'utf8' });
var out = [];
- var expect =
- [ 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa',
- 'aaaaaaaaaa' ];
+ var expect = ['aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa', 'aaaaaaaaaa'];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(10)))
+ while (null !== (chunk = tr.read(10))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-
-test('encoding: hex', function(t) {
+test('encoding: hex', function (t) {
var tr = new TestReader(100, { encoding: 'hex' });
var out = [];
- var expect =
- [ '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161',
- '6161616161' ];
+ var expect = ['6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161', '6161616161'];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(10)))
+ while (null !== (chunk = tr.read(10))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-test('encoding: hex with read(13)', function(t) {
+test('encoding: hex with read(13)', function (t) {
var tr = new TestReader(100, { encoding: 'hex' });
var out = [];
- var expect =
- [ '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '1616161616161',
- '6161616161616',
- '16161' ];
+ var expect = ['6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '1616161616161', '6161616161616', '16161'];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(13)))
+ while (null !== (chunk = tr.read(13))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-test('encoding: base64', function(t) {
+test('encoding: base64', function (t) {
var tr = new TestReader(100, { encoding: 'base64' });
var out = [];
- var expect =
- [ 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYWFhYWFh',
- 'YWFhYWFhYW',
- 'FhYQ==' ];
+ var expect = ['YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYWFhYWFh', 'YWFhYWFhYW', 'FhYQ=='];
tr.on('readable', function flow() {
var chunk;
- while (null !== (chunk = tr.read(10)))
+ while (null !== (chunk = tr.read(10))) {
out.push(chunk);
+ }
});
- tr.on('end', function() {
+ tr.on('end', function () {
t.same(out, expect);
t.end();
});
});
-test('chainable', function(t) {
+test('chainable', function (t) {
var tr = new TestReader(100);
t.equal(tr.setEncoding('utf8'), tr);
t.end();
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-transform.js b/test/parallel/test-stream2-transform.js
index 6038cef9b7..f7e1736ec2 100644
--- a/test/parallel/test-stream2-transform.js
+++ b/test/parallel/test-stream2-transform.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var PassThrough = require('../../lib/_stream_passthrough');
@@ -15,8 +16,7 @@ function test(name, fn) {
function run() {
var next = tests.shift();
- if (!next)
- return console.error('ok');
+ if (!next) return console.error('ok');
var name = next[0];
var fn = next[1];
@@ -25,7 +25,7 @@ function run() {
same: assert.deepEqual,
equal: assert.equal,
ok: assert,
- end: function() {
+ end: function () {
count--;
run();
}
@@ -33,7 +33,7 @@ function run() {
}
// ensure all tests have run
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(count, 0);
});
@@ -41,13 +41,13 @@ process.nextTick(run);
/////
-test('writable side consumption', function(t) {
+test('writable side consumption', function (t) {
var tx = new Transform({
highWaterMark: 10
});
var transformed = 0;
- tx._transform = function(chunk, encoding, cb) {
+ tx._transform = function (chunk, encoding, cb) {
transformed += chunk.length;
tx.push(chunk);
cb();
@@ -61,14 +61,14 @@ test('writable side consumption', function(t) {
t.equal(tx._readableState.length, 10);
t.equal(transformed, 10);
t.equal(tx._transformState.writechunk.length, 5);
- t.same(tx._writableState.getBuffer().map(function(c) {
+ t.same(tx._writableState.getBuffer().map(function (c) {
return c.chunk.length;
}), [6, 7, 8, 9, 10]);
t.end();
});
-test('passthrough', function(t) {
+test('passthrough', function (t) {
var pt = new PassThrough();
pt.write(new Buffer('foog'));
@@ -84,7 +84,7 @@ test('passthrough', function(t) {
t.end();
});
-test('object passthrough', function(t) {
+test('object passthrough', function (t) {
var pt = new PassThrough({ objectMode: true });
pt.write(1);
@@ -93,7 +93,7 @@ test('object passthrough', function(t) {
pt.write(0);
pt.write('foo');
pt.write('');
- pt.write({ a: 'b'});
+ pt.write({ a: 'b' });
pt.end();
t.equal(pt.read(), 1);
@@ -102,13 +102,13 @@ test('object passthrough', function(t) {
t.equal(pt.read(), 0);
t.equal(pt.read(), 'foo');
t.equal(pt.read(), '');
- t.same(pt.read(), { a: 'b'});
+ t.same(pt.read(), { a: 'b' });
t.end();
});
-test('simple transform', function(t) {
+test('simple transform', function (t) {
var pt = new Transform();
- pt._transform = function(c, e, cb) {
+ pt._transform = function (c, e, cb) {
var ret = new Buffer(c.length);
ret.fill('x');
pt.push(ret);
@@ -128,9 +128,9 @@ test('simple transform', function(t) {
t.end();
});
-test('simple object transform', function(t) {
+test('simple object transform', function (t) {
var pt = new Transform({ objectMode: true });
- pt._transform = function(c, e, cb) {
+ pt._transform = function (c, e, cb) {
pt.push(JSON.stringify(c));
cb();
};
@@ -141,7 +141,7 @@ test('simple object transform', function(t) {
pt.write(0);
pt.write('foo');
pt.write('');
- pt.write({ a: 'b'});
+ pt.write({ a: 'b' });
pt.end();
t.equal(pt.read(), '1');
@@ -154,10 +154,10 @@ test('simple object transform', function(t) {
t.end();
});
-test('async passthrough', function(t) {
+test('async passthrough', function (t) {
var pt = new Transform();
- pt._transform = function(chunk, encoding, cb) {
- setTimeout(function() {
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
pt.push(chunk);
cb();
}, 10);
@@ -169,7 +169,7 @@ test('async passthrough', function(t) {
pt.write(new Buffer('kuel'));
pt.end();
- pt.on('finish', function() {
+ pt.on('finish', function () {
t.equal(pt.read(5).toString(), 'foogb');
t.equal(pt.read(5).toString(), 'arkba');
t.equal(pt.read(5).toString(), 'zykue');
@@ -178,14 +178,14 @@ test('async passthrough', function(t) {
});
});
-test('assymetric transform (expand)', function(t) {
+test('assymetric transform (expand)', function (t) {
var pt = new Transform();
// emit each chunk 2 times.
- pt._transform = function(chunk, encoding, cb) {
- setTimeout(function() {
+ pt._transform = function (chunk, encoding, cb) {
+ setTimeout(function () {
pt.push(chunk);
- setTimeout(function() {
+ setTimeout(function () {
pt.push(chunk);
cb();
}, 10);
@@ -198,7 +198,7 @@ test('assymetric transform (expand)', function(t) {
pt.write(new Buffer('kuel'));
pt.end();
- pt.on('finish', function() {
+ pt.on('finish', function () {
t.equal(pt.read(5).toString(), 'foogf');
t.equal(pt.read(5).toString(), 'oogba');
t.equal(pt.read(5).toString(), 'rkbar');
@@ -210,18 +210,17 @@ test('assymetric transform (expand)', function(t) {
});
});
-test('assymetric transform (compress)', function(t) {
+test('assymetric transform (compress)', function (t) {
var pt = new Transform();
// each output is the first char of 3 consecutive chunks,
// or whatever's left.
pt.state = '';
- pt._transform = function(chunk, encoding, cb) {
- if (!chunk)
- chunk = '';
+ pt._transform = function (chunk, encoding, cb) {
+ if (!chunk) chunk = '';
var s = chunk.toString();
- setTimeout(function() {
+ setTimeout(function () {
this.state += s.charAt(0);
if (this.state.length === 3) {
pt.push(new Buffer(this.state));
@@ -231,7 +230,7 @@ test('assymetric transform (compress)', function(t) {
}.bind(this), 10);
};
- pt._flush = function(cb) {
+ pt._flush = function (cb) {
// just output whatever we have.
pt.push(new Buffer(this.state));
this.state = '';
@@ -255,7 +254,7 @@ test('assymetric transform (compress)', function(t) {
pt.end();
// 'abcdeabcdeabcd'
- pt.on('finish', function() {
+ pt.on('finish', function () {
t.equal(pt.read(5).toString(), 'abcde');
t.equal(pt.read(5).toString(), 'abcde');
t.equal(pt.read(5).toString(), 'abcd');
@@ -265,14 +264,12 @@ test('assymetric transform (compress)', function(t) {
// this tests for a stall when data is written to a full stream
// that has empty transforms.
-test('complex transform', function(t) {
+test('complex transform', function (t) {
var count = 0;
var saved = null;
- var pt = new Transform({highWaterMark:3});
- pt._transform = function(c, e, cb) {
- if (count++ === 1)
- saved = c;
- else {
+ var pt = new Transform({ highWaterMark: 3 });
+ pt._transform = function (c, e, cb) {
+ if (count++ === 1) saved = c;else {
if (saved) {
pt.push(saved);
saved = null;
@@ -283,10 +280,10 @@ test('complex transform', function(t) {
cb();
};
- pt.once('readable', function() {
- process.nextTick(function() {
+ pt.once('readable', function () {
+ process.nextTick(function () {
pt.write(new Buffer('d'));
- pt.write(new Buffer('ef'), function() {
+ pt.write(new Buffer('ef'), function () {
pt.end();
t.end();
});
@@ -298,11 +295,10 @@ test('complex transform', function(t) {
pt.write(new Buffer('abc'));
});
-
-test('passthrough event emission', function(t) {
+test('passthrough event emission', function (t) {
var pt = new PassThrough();
var emits = 0;
- pt.on('readable', function() {
+ pt.on('readable', function () {
console.error('>>> emit readable %d', emits);
emits++;
});
@@ -345,10 +341,10 @@ test('passthrough event emission', function(t) {
t.end();
});
-test('passthrough event emission reordered', function(t) {
+test('passthrough event emission reordered', function (t) {
var pt = new PassThrough();
var emits = 0;
- pt.on('readable', function() {
+ pt.on('readable', function () {
console.error('emit readable', emits);
emits++;
});
@@ -363,16 +359,16 @@ test('passthrough event emission reordered', function(t) {
t.equal(pt.read(5), null);
console.error('need emit 1');
- pt.once('readable', function() {
+ pt.once('readable', function () {
t.equal(pt.read(5).toString(), 'arkba');
t.equal(pt.read(5), null);
console.error('need emit 2');
- pt.once('readable', function() {
+ pt.once('readable', function () {
t.equal(pt.read(5).toString(), 'zykue');
t.equal(pt.read(5), null);
- pt.once('readable', function() {
+ pt.once('readable', function () {
t.equal(pt.read(5).toString(), 'l');
t.equal(pt.read(5), null);
t.equal(emits, 4);
@@ -386,27 +382,27 @@ test('passthrough event emission reordered', function(t) {
pt.write(new Buffer('bazy'));
});
-test('passthrough facaded', function(t) {
+test('passthrough facaded', function (t) {
console.error('passthrough facaded');
var pt = new PassThrough();
var datas = [];
- pt.on('data', function(chunk) {
+ pt.on('data', function (chunk) {
datas.push(chunk.toString());
});
- pt.on('end', function() {
+ pt.on('end', function () {
t.same(datas, ['foog', 'bark', 'bazy', 'kuel']);
t.end();
});
pt.write(new Buffer('foog'));
- setTimeout(function() {
+ setTimeout(function () {
pt.write(new Buffer('bark'));
- setTimeout(function() {
+ setTimeout(function () {
pt.write(new Buffer('bazy'));
- setTimeout(function() {
+ setTimeout(function () {
pt.write(new Buffer('kuel'));
- setTimeout(function() {
+ setTimeout(function () {
pt.end();
}, 10);
}, 10);
@@ -414,10 +410,10 @@ test('passthrough facaded', function(t) {
}, 10);
});
-test('object transform (json parse)', function(t) {
+test('object transform (json parse)', function (t) {
console.error('json parse stream');
var jp = new Transform({ objectMode: true });
- jp._transform = function(data, encoding, cb) {
+ jp._transform = function (data, encoding, cb) {
try {
jp.push(JSON.parse(data));
cb();
@@ -428,19 +424,14 @@ test('object transform (json parse)', function(t) {
// anything except null/undefined is fine.
// those are "magic" in the stream API, because they signal EOF.
- var objects = [
- { foo: 'bar' },
- 100,
- 'string',
- { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
- ];
+ var objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }];
var ended = false;
- jp.on('end', function() {
+ jp.on('end', function () {
ended = true;
});
- forEach(objects, function(obj) {
+ forEach(objects, function (obj) {
jp.write(JSON.stringify(obj));
var res = jp.read();
t.same(res, obj);
@@ -450,16 +441,16 @@ test('object transform (json parse)', function(t) {
// read one more time to get the 'end' event
jp.read();
- process.nextTick(function() {
+ process.nextTick(function () {
t.ok(ended);
t.end();
});
});
-test('object transform (json stringify)', function(t) {
+test('object transform (json stringify)', function (t) {
console.error('json parse stream');
var js = new Transform({ objectMode: true });
- js._transform = function(data, encoding, cb) {
+ js._transform = function (data, encoding, cb) {
try {
js.push(JSON.stringify(data));
cb();
@@ -470,19 +461,14 @@ test('object transform (json stringify)', function(t) {
// anything except null/undefined is fine.
// those are "magic" in the stream API, because they signal EOF.
- var objects = [
- { foo: 'bar' },
- 100,
- 'string',
- { nested: { things: [ { foo: 'bar' }, 100, 'string' ] } }
- ];
+ var objects = [{ foo: 'bar' }, 100, 'string', { nested: { things: [{ foo: 'bar' }, 100, 'string'] } }];
var ended = false;
- js.on('end', function() {
+ js.on('end', function () {
ended = true;
});
- forEach(objects, function(obj) {
+ forEach(objects, function (obj) {
js.write(obj);
var res = js.read();
t.equal(res, JSON.stringify(obj));
@@ -492,14 +478,14 @@ test('object transform (json stringify)', function(t) {
// read one more time to get the 'end' event
js.read();
- process.nextTick(function() {
+ process.nextTick(function () {
t.ok(ended);
t.end();
});
});
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream2-unpipe-drain.js b/test/parallel/test-stream2-unpipe-drain.js
index 1a7b85a8a7..93facc8c8f 100644
--- a/test/parallel/test-stream2-unpipe-drain.js
+++ b/test/parallel/test-stream2-unpipe-drain.js
@@ -1,60 +1,62 @@
-'use strict';
-var common = require('../common');
-var assert = require('assert');
-var stream = require('../../');
+(function () {
+ 'use strict';
-if (!common.hasCrypto) {
- console.log('1..0 # Skipped: missing crypto');
- return;
-}
-var crypto = require('crypto');
+ var common = require('../common');
+ var assert = require('assert');
+ var stream = require('../../');
-var util = require('util');
+ if (!common.hasCrypto) {
+ console.log('1..0 # Skipped: missing crypto');
+ return;
+ }
+ var crypto = require('crypto');
-function TestWriter() {
- stream.Writable.call(this);
-}
-util.inherits(TestWriter, stream.Writable);
+ var util = require('util');
-TestWriter.prototype._write = function(buffer, encoding, callback) {
- console.log('write called');
- // super slow write stream (callback never called)
-};
+ function TestWriter() {
+ stream.Writable.call(this);
+ }
+ util.inherits(TestWriter, stream.Writable);
-var dest = new TestWriter();
+ TestWriter.prototype._write = function (buffer, encoding, callback) {
+ console.log('write called');
+ // super slow write stream (callback never called)
+ };
-function TestReader(id) {
- stream.Readable.call(this);
- this.reads = 0;
-}
-util.inherits(TestReader, stream.Readable);
+ var dest = new TestWriter();
-TestReader.prototype._read = function(size) {
- this.reads += 1;
- this.push(crypto.randomBytes(size));
-};
+ function TestReader(id) {
+ stream.Readable.call(this);
+ this.reads = 0;
+ }
+ util.inherits(TestReader, stream.Readable);
-var src1 = new TestReader();
-var src2 = new TestReader();
+ TestReader.prototype._read = function (size) {
+ this.reads += 1;
+ this.push(crypto.randomBytes(size));
+ };
-src1.pipe(dest);
+ var src1 = new TestReader();
+ var src2 = new TestReader();
-src1.once('readable', function() {
- process.nextTick(function() {
+ src1.pipe(dest);
- src2.pipe(dest);
+ src1.once('readable', function () {
+ process.nextTick(function () {
- src2.once('readable', function() {
- process.nextTick(function() {
+ src2.pipe(dest);
- src1.unpipe(dest);
+ src2.once('readable', function () {
+ process.nextTick(function () {
+
+ src1.unpipe(dest);
+ });
});
});
});
-});
-
-process.on('exit', function() {
- assert.equal(src1.reads, 2);
- assert.equal(src2.reads, 2);
-});
+ process.on('exit', function () {
+ assert.equal(src1.reads, 2);
+ assert.equal(src2.reads, 2);
+ });
+})();
\ No newline at end of file
diff --git a/test/parallel/test-stream2-unpipe-leak.js b/test/parallel/test-stream2-unpipe-leak.js
index a268584c10..7594a27ddc 100644
--- a/test/parallel/test-stream2-unpipe-leak.js
+++ b/test/parallel/test-stream2-unpipe-leak.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
var stream = require('../../');
@@ -12,7 +13,7 @@ function TestWriter() {
}
util.inherits(TestWriter, stream.Writable);
-TestWriter.prototype._write = function(buffer, encoding, callback) {
+TestWriter.prototype._write = function (buffer, encoding, callback) {
callback(null);
};
@@ -25,7 +26,7 @@ function TestReader() {
}
util.inherits(TestReader, stream.Readable);
-TestReader.prototype._read = function(size) {
+TestReader.prototype._read = function (size) {
this.push(chunk);
};
@@ -46,9 +47,9 @@ assert.equal(dest.listeners('close').length, 0);
assert.equal(dest.listeners('finish').length, 0);
console.error(src._readableState);
-process.on('exit', function() {
+process.on('exit', function () {
src._readableState.buffer.length = 0;
console.error(src._readableState);
assert(src._readableState.length >= src._readableState.highWaterMark);
console.log('ok');
-});
+});
\ No newline at end of file
diff --git a/test/parallel/test-stream2-writable.js b/test/parallel/test-stream2-writable.js
index a47fd6b272..81a5d41ccc 100644
--- a/test/parallel/test-stream2-writable.js
+++ b/test/parallel/test-stream2-writable.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var W = require('../../lib/_stream_writable');
var D = require('../../lib/_stream_duplex');
@@ -13,9 +14,9 @@ function TestWriter() {
this.written = 0;
}
-TestWriter.prototype._write = function(chunk, encoding, cb) {
+TestWriter.prototype._write = function (chunk, encoding, cb) {
// simulate a small unpredictable latency
- setTimeout(function() {
+ setTimeout(function () {
this.buffer.push(chunk.toString());
this.written += chunk.length;
cb();
@@ -38,8 +39,7 @@ function test(name, fn) {
function run() {
var next = tests.shift();
- if (!next)
- return console.error('ok');
+ if (!next) return console.error('ok');
var name = next[0];
var fn = next[1];
@@ -47,7 +47,7 @@ function run() {
fn({
same: assert.deepEqual,
equal: assert.equal,
- end: function() {
+ end: function () {
count--;
run();
}
@@ -55,35 +55,35 @@ function run() {
}
// ensure all tests have run
-process.on('exit', function() {
+process.on('exit', function () {
assert.equal(count, 0);
});
process.nextTick(run);
-test('write fast', function(t) {
+test('write fast', function (t) {
var tw = new TestWriter({
highWaterMark: 100
});
- tw.on('finish', function() {
+ tw.on('finish', function () {
t.same(tw.buffer, chunks, 'got chunks in the right order');
t.end();
});
- forEach(chunks, function(chunk) {
+ forEach(chunks, function (chunk) {
// screw backpressure. Just buffer it all up.
tw.write(chunk);
});
tw.end();
});
-test('write slow', function(t) {
+test('write slow', function (t) {
var tw = new TestWriter({
highWaterMark: 100
});
- tw.on('finish', function() {
+ tw.on('finish', function () {
t.same(tw.buffer, chunks, 'got chunks in the right order');
t.end();
});
@@ -91,27 +91,24 @@ test('write slow', function(t) {
var i = 0;
(function W() {
tw.write(chunks[i++]);
- if (i < chunks.length)
- setTimeout(W, 10);
- else
- tw.end();
+ if (i < chunks.length) setTimeout(W, 10);else tw.end();
})();
});
-test('write backpressure', function(t) {
+test('write backpressure', function (t) {
var tw = new TestWriter({
highWaterMark: 50
});
var drains = 0;
- tw.on('finish', function() {
+ tw.on('finish', function () {
t.same(tw.buffer, chunks, 'got chunks in the right order');
t.equal(drains, 17);
t.end();
});
- tw.on('drain', function() {
+ tw.on('drain', function () {
drains++;
});
@@ -130,79 +127,57 @@ test('write backpressure', function(t) {
})();
});
-test('write bufferize', function(t) {
+test('write bufferize', function (t) {
var tw = new TestWriter({
highWaterMark: 100
});
- var encodings =
- [ 'hex',
- 'utf8',
- 'utf-8',
- 'ascii',
- 'binary',
- 'base64',
- 'ucs2',
- 'ucs-2',
- 'utf16le',
- 'utf-16le',
- undefined ];
-
- tw.on('finish', function() {
+ var encodings = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', undefined];
+
+ tw.on('finish', function () {
t.same(tw.buffer, chunks, 'got the expected chunks');
});
- forEach(chunks, function(chunk, i) {
- var enc = encodings[ i % encodings.length ];
+ forEach(chunks, function (chunk, i) {
+ var enc = encodings[i % encodings.length];
chunk = new Buffer(chunk);
tw.write(chunk.toString(enc), enc);
});
t.end();
});
-test('write no bufferize', function(t) {
+test('write no bufferize', function (t) {
var tw = new TestWriter({
highWaterMark: 100,
decodeStrings: false
});
- tw._write = function(chunk, encoding, cb) {
+ tw._write = function (chunk, encoding, cb) {
assert(typeof chunk === 'string');
chunk = new Buffer(chunk, encoding);
return TestWriter.prototype._write.call(this, chunk, encoding, cb);
};
- var encodings =
- [ 'hex',
- 'utf8',
- 'utf-8',
- 'ascii',
- 'binary',
- 'base64',
- 'ucs2',
- 'ucs-2',
- 'utf16le',
- 'utf-16le',
- undefined ];
-
- tw.on('finish', function() {
+ var encodings = ['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', undefined];
+
+ tw.on('finish', function () {
t.same(tw.buffer, chunks, 'got the expected chunks');
});
- forEach(chunks, function(chunk, i) {
- var enc = encodings[ i % encodings.length ];
+ forEach(chunks, function (chunk, i) {
+ var enc = encodings[i % encodings.length];
chunk = new Buffer(chunk);
tw.write(chunk.toString(enc), enc);
});
t.end();
});
-test('write callbacks', function(t) {
- var callbacks = chunks.map(function(chunk, i) {
- return [i, function(er) {
+test('write callbacks', function (t) {
+ var callbacks = chunks.map(function (chunk, i) {
+ return [i, function (er) {
callbacks._called[i] = chunk;
}];
- }).reduce(function(set, x) {
+ }).reduce(function (set, x) {
set['callback-' + x[0]] = x[1];
return set;
}, {});
@@ -212,65 +187,65 @@ test('write callbacks', function(t) {
highWaterMark: 100
});
- tw.on('finish', function() {
- process.nextTick(function() {
+ tw.on('finish', function () {
+ process.nextTick(function () {
t.same(tw.buffer, chunks, 'got chunks in the right order');
t.same(callbacks._called, chunks, 'called all callbacks');
t.end();
});
});
- forEach(chunks, function(chunk, i) {
+ forEach(chunks, function (chunk, i) {
tw.write(chunk, callbacks['callback-' + i]);
});
tw.end();
});
-test('end callback', function(t) {
+test('end callback', function (t) {
var tw = new TestWriter();
- tw.end(function() {
+ tw.end(function () {
t.end();
});
});
-test('end callback with chunk', function(t) {
+test('end callback with chunk', function (t) {
var tw = new TestWriter();
- tw.end(new Buffer('hello world'), function() {
+ tw.end(new Buffer('hello world'), function () {
t.end();
});
});
-test('end callback with chunk and encoding', function(t) {
+test('end callback with chunk and encoding', function (t) {
var tw = new TestWriter();
- tw.end('hello world', 'ascii', function() {
+ tw.end('hello world', 'ascii', function () {
t.end();
});
});
-test('end callback after .write() call', function(t) {
+test('end callback after .write() call', function (t) {
var tw = new TestWriter();
tw.write(new Buffer('hello world'));
- tw.end(function() {
+ tw.end(function () {
t.end();
});
});
-test('end callback called after write callback', function(t) {
+test('end callback called after write callback', function (t) {
var tw = new TestWriter();
var writeCalledback = false;
- tw.write(new Buffer('hello world'), function() {
+ tw.write(new Buffer('hello world'), function () {
writeCalledback = true;
});
- tw.end(function() {
+ tw.end(function () {
t.equal(writeCalledback, true);
t.end();
});
});
-test('encoding should be ignored for buffers', function(t) {
+test('encoding should be ignored for buffers', function (t) {
var tw = new W();
var hex = '018b5e9a8f6236ffe30e31baf80d2cf6eb';
- tw._write = function(chunk, encoding, cb) {
+ tw._write = function (chunk, encoding, cb) {
t.equal(chunk.toString('hex'), hex);
t.end();
};
@@ -278,11 +253,11 @@ test('encoding should be ignored for buffers', function(t) {
tw.write(buf, 'binary');
});
-test('writables are not pipable', function(t) {
+test('writables are not pipable', function (t) {
var w = new W();
- w._write = function() {};
+ w._write = function () {};
var gotError = false;
- w.on('error', function(er) {
+ w.on('error', function (er) {
gotError = true;
});
w.pipe(process.stdout);
@@ -290,12 +265,12 @@ test('writables are not pipable', function(t) {
t.end();
});
-test('duplexes are pipable', function(t) {
+test('duplexes are pipable', function (t) {
var d = new D();
- d._read = function() {};
- d._write = function() {};
+ d._read = function () {};
+ d._write = function () {};
var gotError = false;
- d.on('error', function(er) {
+ d.on('error', function (er) {
gotError = true;
});
d.pipe(process.stdout);
@@ -303,35 +278,35 @@ test('duplexes are pipable', function(t) {
t.end();
});
-test('end(chunk) two times is an error', function(t) {
+test('end(chunk) two times is an error', function (t) {
var w = new W();
- w._write = function() {};
+ w._write = function () {};
var gotError = false;
- w.on('error', function(er) {
+ w.on('error', function (er) {
gotError = true;
t.equal(er.message, 'write after end');
});
w.end('this is the end');
w.end('and so is this');
- process.nextTick(function() {
+ process.nextTick(function () {
assert(gotError);
t.end();
});
});
-test('dont end while writing', function(t) {
+test('dont end while writing', function (t) {
var w = new W();
var wrote = false;
- w._write = function(chunk, e, cb) {
+ w._write = function (chunk, e, cb) {
assert(!this.writing);
wrote = true;
this.writing = true;
- setTimeout(function() {
+ setTimeout(function () {
this.writing = false;
cb();
});
};
- w.on('finish', function() {
+ w.on('finish', function () {
assert(wrote);
t.end();
});
@@ -339,16 +314,16 @@ test('dont end while writing', function(t) {
w.end();
});
-test('finish does not come before write cb', function(t) {
+test('finish does not come before write cb', function (t) {
var w = new W();
var writeCb = false;
- w._write = function(chunk, e, cb) {
- setTimeout(function() {
+ w._write = function (chunk, e, cb) {
+ setTimeout(function () {
writeCb = true;
cb();
}, 10);
};
- w.on('finish', function() {
+ w.on('finish', function () {
assert(writeCb);
t.end();
});
@@ -356,36 +331,36 @@ test('finish does not come before write cb', function(t) {
w.end();
});
-test('finish does not come before sync _write cb', function(t) {
+test('finish does not come before sync _write cb', function (t) {
var w = new W();
var writeCb = false;
- w._write = function(chunk, e, cb) {
+ w._write = function (chunk, e, cb) {
cb();
};
- w.on('finish', function() {
+ w.on('finish', function () {
assert(writeCb);
t.end();
});
- w.write(Buffer(0), function(er) {
+ w.write(Buffer(0), function (er) {
writeCb = true;
});
w.end();
});
-test('finish is emitted if last chunk is empty', function(t) {
+test('finish is emitted if last chunk is empty', function (t) {
var w = new W();
- w._write = function(chunk, e, cb) {
+ w._write = function (chunk, e, cb) {
process.nextTick(cb);
};
- w.on('finish', function() {
+ w.on('finish', function () {
t.end();
});
w.write(Buffer(1));
w.end(Buffer(0));
});
-function forEach (xs, f) {
+function forEach(xs, f) {
for (var i = 0, l = xs.length; i < l; i++) {
f(xs[i], i);
}
-}
+}
\ No newline at end of file
diff --git a/test/parallel/test-stream3-pause-then-read.js b/test/parallel/test-stream3-pause-then-read.js
index 2dc5bd1ff2..4f8dc423b0 100644
--- a/test/parallel/test-stream3-pause-then-read.js
+++ b/test/parallel/test-stream3-pause-then-read.js
@@ -1,4 +1,5 @@
'use strict';
+
require('../common');
var assert = require('assert');
@@ -13,13 +14,8 @@ var expectEndingData = expectTotalData;
var r = new Readable({ highWaterMark: 1000 });
var chunks = totalChunks;
-r._read = function(n) {
- if (!(chunks % 2))
- setImmediate(push);
- else if (!(chunks % 3))
- process.nextTick(push);
- else
- push();
+r._read = function (n) {
+ if (!(chunks % 2)) setImmediate(push);else if (!(chunks % 3)) process.nextTick(push);else push();
};
var totalPushed = 0;
@@ -44,9 +40,7 @@ function readn(n, then) {
expectEndingData -= n;
(function read() {
var c = r.read(n);
- if (!c)
- r.once('readable', read);
- else {
+ if (!c) r.once('readable', read);else {
assert.equal(c.length, n);
assert(!r._readableState.flowing);
then();
@@ -85,11 +79,11 @@ function pipeLittle() {
console.error('pipe a little');
var w = new Writable();
var written = 0;
- w.on('finish', function() {
+ w.on('finish', function () {
assert.equal(written, 200);
setImmediate(read1234);
});
- w._write = function(chunk, encoding, cb) {
+ w._write = function (chunk, encoding, cb) {
written += chunk.length;
if (written >= 200) {
r.unpipe(w);
@@ -128,20 +122,19 @@ function resumePause() {
setImmediate(pipe);
}
-
function pipe() {
console.error('pipe the rest');
var w = new Writable();
var written = 0;
- w._write = function(chunk, encoding, cb) {
+ w._write = function (chunk, encoding, cb) {
written += chunk.length;
cb();
};
- w.on('finish', function() {
+ w.on('finish', function () {
console.error('written', written, totalPushed);
assert.equal(written, expectEndingData);
assert.equal(totalPushed, expectTotalData);
console.log('ok');
});
r.pipe(w);
-}
+}
\ No newline at end of file