author     Florian Dold <florian.dold@gmail.com>  2017-04-20 03:09:25 +0200
committer  Florian Dold <florian.dold@gmail.com>  2017-04-24 16:14:29 +0200
commit     82f2b76e25a4a67e01ec67e5ebe39d14ad771ea8 (patch)
tree       965f6eb89b84d65a62b49008fd972c004832ccd1 /node_modules/readable-stream
parent     e6e0cbc387c2a77b48e4065c229daa65bf1aa0fa (diff)
download   wallet-core-82f2b76e25a4a67e01ec67e5ebe39d14ad771ea8.tar.xz
Reorganize module loading.
We now use webpack instead of SystemJS, effectively bundling modules into one file (plus commons chunks) for every entry point. This results in a much smaller extension size (almost half). Furthermore, we now use yarn/npm even for extension run-time dependencies, which relieves us of manually vendoring and building dependencies. It is also easier to understand for new developers familiar with Node.
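
As a rough illustration of the setup described above, a webpack (2.x-era) configuration with multiple entry points and a shared commons chunk might look like the following sketch. The entry names and paths are hypothetical, not taken from the actual wallet-core config.

```js
// Hypothetical multi-entry webpack config; entry names/paths are illustrative.
const path = require('path');
const webpack = require('webpack');

module.exports = {
  entry: {
    background: './src/background.js',
    popup: './src/popup.js',
  },
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: '[name].bundle.js',
  },
  plugins: [
    // Factor modules used by two or more entry points into a shared chunk.
    new webpack.optimize.CommonsChunkPlugin({
      name: 'commons',
      minChunks: 2,
    }),
  ],
};
```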
Diffstat (limited to 'node_modules/readable-stream')
-rw-r--r--  node_modules/readable-stream/.npmignore               |   6
-rw-r--r--  node_modules/readable-stream/.travis.yml              |  15
-rw-r--r--  node_modules/readable-stream/LICENSE                  |  29
-rw-r--r--  node_modules/readable-stream/README.md                |  29
-rw-r--r--  node_modules/readable-stream/duplex.js                |   2
-rw-r--r--  node_modules/readable-stream/lib/_stream_readable.js  | 305
-rw-r--r--  node_modules/readable-stream/lib/_stream_transform.js |  16
-rw-r--r--  node_modules/readable-stream/lib/_stream_writable.js  | 112
-rw-r--r--  node_modules/readable-stream/package.json             |  33
-rw-r--r--  node_modules/readable-stream/passthrough.js           |   2
-rw-r--r--  node_modules/readable-stream/readable.js              |  31
-rw-r--r--  node_modules/readable-stream/transform.js             |   2
-rw-r--r--  node_modules/readable-stream/writable.js              |   9
13 files changed, 379 insertions, 212 deletions
diff --git a/node_modules/readable-stream/.npmignore b/node_modules/readable-stream/.npmignore
index 38344f87a..6d270c6cc 100644
--- a/node_modules/readable-stream/.npmignore
+++ b/node_modules/readable-stream/.npmignore
@@ -2,4 +2,8 @@ build/
test/
examples/
fs.js
-zlib.js
\ No newline at end of file
+zlib.js
+.zuul.yml
+.nyc_output
+coverage
+docs/
diff --git a/node_modules/readable-stream/.travis.yml b/node_modules/readable-stream/.travis.yml
index 1b8211846..76b4b0cfc 100644
--- a/node_modules/readable-stream/.travis.yml
+++ b/node_modules/readable-stream/.travis.yml
@@ -7,9 +7,6 @@ notifications:
email: false
matrix:
fast_finish: true
- allow_failures:
- - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
- - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
include:
- node_js: '0.8'
env: TASK=test
@@ -29,8 +26,10 @@ matrix:
env: TASK=test
- node_js: 5
env: TASK=test
- - node_js: 5
- env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest"
+ - node_js: 6
+ env: TASK=test
+ - node_js: 7
+ env: TASK=test
- node_js: 5
env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest"
- node_js: 5
@@ -40,11 +39,9 @@ matrix:
- node_js: 5
env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest"
- node_js: 5
- env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest"
- - node_js: 5
- env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest"
- - node_js: 5
env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest"
+ - node_js: 5
+ env: TASK=browser BROWSER_NAME=microsoftedge BROWSER_VERSION=latest
script: "npm run $TASK"
env:
global:
diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE
index e3d4e695a..2873b3b2e 100644
--- a/node_modules/readable-stream/LICENSE
+++ b/node_modules/readable-stream/LICENSE
@@ -1,3 +1,31 @@
+Node.js is licensed for use as follows:
+
+"""
+Copyright Node.js contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
+"""
+
+This license applies to parts of Node.js originating from the
+https://github.com/joyent/node repository:
+
+"""
Copyright Joyent, Inc. and other Node contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
@@ -16,3 +44,4 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
+"""
diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md
index 86b95a3bf..8b6e5d39b 100644
--- a/node_modules/readable-stream/README.md
+++ b/node_modules/readable-stream/README.md
@@ -1,6 +1,6 @@
# readable-stream
-***Node-core v5.8.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)
+***Node-core v7.0.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)
[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
@@ -16,14 +16,33 @@ npm install --save readable-stream
***Node-core streams for userland***
This package is a mirror of the Streams2 and Streams3 implementations in
-Node-core, including [documentation](doc/stream.markdown).
+Node-core.
+
+Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v7.8.0/docs/api/).
If you want to guarantee a stable streams base, regardless of what version of
Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).
-As of version 2.0.0 **readable-stream** uses semantic versioning.
+As of version 2.0.0 **readable-stream** uses semantic versioning.
+
+# Streams Working Group
+
+`readable-stream` is maintained by the Streams Working Group, which
+oversees the development and maintenance of the Streams API within
+Node.js. The responsibilities of the Streams Working Group include:
+
+* Addressing stream issues on the Node.js issue tracker.
+* Authoring and editing stream documentation within the Node.js project.
+* Reviewing changes to stream subclasses within the Node.js project.
+* Redirecting changes to streams from the Node.js project to this
+ project.
+* Assisting in the implementation of stream providers within Node.js.
+* Recommending versions of `readable-stream` to be included in Node.js.
+* Messaging about the future of streams to give the community advance
+ notice of changes.
-# Streams WG Team Members
+<a name="members"></a>
+## Team Members
* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;christopher.s.dickinson@gmail.com&gt;
- Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
@@ -34,3 +53,5 @@ As of version 2.0.0 **readable-stream** uses semantic versioning.
* **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;newmansam@outlook.com&gt;
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;mathiasbuus@gmail.com&gt;
* **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;d@domenic.me&gt;
+* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) &lt;matteo.collina@gmail.com&gt;
+ - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E
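
The README guidance above boils down to requiring streams from readable-stream instead of Node core; a minimal sketch of the substitution (the example transform itself is illustrative):

```js
// Instead of: const { Transform } = require('stream');
const { Transform } = require('readable-stream');

// Uppercases whatever flows through it.
const upper = new Transform({
  transform(chunk, encoding, callback) {
    callback(null, chunk.toString().toUpperCase());
  },
});

process.stdin.pipe(upper).pipe(process.stdout);
```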
diff --git a/node_modules/readable-stream/duplex.js b/node_modules/readable-stream/duplex.js
index ca807af87..46924cbfd 100644
--- a/node_modules/readable-stream/duplex.js
+++ b/node_modules/readable-stream/duplex.js
@@ -1 +1 @@
-module.exports = require("./lib/_stream_duplex.js")
+module.exports = require('./readable').Duplex
diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js
index 54a9d5c55..b19b2088b 100644
--- a/node_modules/readable-stream/lib/_stream_readable.js
+++ b/node_modules/readable-stream/lib/_stream_readable.js
@@ -11,31 +11,27 @@ var isArray = require('isarray');
/*</replacement>*/
/*<replacement>*/
-var Buffer = require('buffer').Buffer;
+var Duplex;
/*</replacement>*/
Readable.ReadableState = ReadableState;
-var EE = require('events');
-
/*<replacement>*/
+var EE = require('events').EventEmitter;
+
var EElistenerCount = function (emitter, type) {
return emitter.listeners(type).length;
};
/*</replacement>*/
/*<replacement>*/
-var Stream;
-(function () {
- try {
- Stream = require('st' + 'ream');
- } catch (_) {} finally {
- if (!Stream) Stream = require('events').EventEmitter;
- }
-})();
+var Stream = require('./internal/streams/stream');
/*</replacement>*/
var Buffer = require('buffer').Buffer;
+/*<replacement>*/
+var bufferShim = require('buffer-shims');
+/*</replacement>*/
/*<replacement>*/
var util = require('core-util-is');
@@ -44,7 +40,7 @@ util.inherits = require('inherits');
/*<replacement>*/
var debugUtil = require('util');
-var debug = undefined;
+var debug = void 0;
if (debugUtil && debugUtil.debuglog) {
debug = debugUtil.debuglog('stream');
} else {
@@ -52,11 +48,27 @@ if (debugUtil && debugUtil.debuglog) {
}
/*</replacement>*/
+var BufferList = require('./internal/streams/BufferList');
var StringDecoder;
util.inherits(Readable, Stream);
-var Duplex;
+var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
+
+function prependListener(emitter, event, fn) {
+ // Sadly this is not cacheable as some libraries bundle their own
+ // event emitter implementation with them.
+ if (typeof emitter.prependListener === 'function') {
+ return emitter.prependListener(event, fn);
+ } else {
+ // This is a hack to make sure that our error handler is attached before any
+ // userland ones. NEVER DO THIS. This is here only because this code needs
+ // to continue to work with older versions of Node.js that do not include
+ // the prependListener() method. The goal is to eventually remove this hack.
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
+ }
+}
+
function ReadableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
@@ -75,9 +87,12 @@ function ReadableState(options, stream) {
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~ ~this.highWaterMark;
+ this.highWaterMark = ~~this.highWaterMark;
- this.buffer = [];
+ // A linked list is used to store data chunks instead of an array because the
+ // linked list can remove elements from the beginning faster than
+ // array.shift()
+ this.buffer = new BufferList();
this.length = 0;
this.pipes = null;
this.pipesCount = 0;
@@ -123,7 +138,6 @@ function ReadableState(options, stream) {
}
}
-var Duplex;
function Readable(options) {
Duplex = Duplex || require('./_stream_duplex');
@@ -149,7 +163,7 @@ Readable.prototype.push = function (chunk, encoding) {
if (!state.objectMode && typeof chunk === 'string') {
encoding = encoding || state.defaultEncoding;
if (encoding !== state.encoding) {
- chunk = new Buffer(chunk, encoding);
+ chunk = bufferShim.from(chunk, encoding);
encoding = '';
}
}
@@ -179,8 +193,8 @@ function readableAddChunk(stream, state, chunk, encoding, addToFront) {
var e = new Error('stream.push() after EOF');
stream.emit('error', e);
} else if (state.endEmitted && addToFront) {
- var e = new Error('stream.unshift() after end event');
- stream.emit('error', e);
+ var _e = new Error('stream.unshift() after end event');
+ stream.emit('error', _e);
} else {
var skipAdd;
if (state.decoder && !addToFront && !encoding) {
@@ -240,7 +254,8 @@ function computeNewHighWaterMark(n) {
if (n >= MAX_HWM) {
n = MAX_HWM;
} else {
- // Get the next highest power of 2
+ // Get the next highest power of 2 to prevent increasing hwm excessively in
+ // tiny amounts
n--;
n |= n >>> 1;
n |= n >>> 2;
@@ -252,44 +267,34 @@ function computeNewHighWaterMark(n) {
return n;
}
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
function howMuchToRead(n, state) {
- if (state.length === 0 && state.ended) return 0;
-
- if (state.objectMode) return n === 0 ? 0 : 1;
-
- if (n === null || isNaN(n)) {
- // only flow one buffer at a time
- if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length;
+ if (n <= 0 || state.length === 0 && state.ended) return 0;
+ if (state.objectMode) return 1;
+ if (n !== n) {
+ // Only flow one buffer at a time
+ if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
}
-
- if (n <= 0) return 0;
-
- // If we're asking for more than the target buffer level,
- // then raise the water mark. Bump up to the next highest
- // power of 2, to prevent increasing it excessively in tiny
- // amounts.
+ // If we're asking for more than the current hwm, then raise the hwm.
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
-
- // don't have that much. return null, unless we've ended.
- if (n > state.length) {
- if (!state.ended) {
- state.needReadable = true;
- return 0;
- } else {
- return state.length;
- }
+ if (n <= state.length) return n;
+ // Don't have enough
+ if (!state.ended) {
+ state.needReadable = true;
+ return 0;
}
-
- return n;
+ return state.length;
}
// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function (n) {
debug('read', n);
+ n = parseInt(n, 10);
var state = this._readableState;
var nOrig = n;
- if (typeof n !== 'number' || n > 0) state.emittedReadable = false;
+ if (n !== 0) state.emittedReadable = false;
// if we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
@@ -345,9 +350,7 @@ Readable.prototype.read = function (n) {
if (state.ended || state.reading) {
doRead = false;
debug('reading or ended', doRead);
- }
-
- if (doRead) {
+ } else if (doRead) {
debug('do read');
state.reading = true;
state.sync = true;
@@ -356,28 +359,29 @@ Readable.prototype.read = function (n) {
// call internal read method
this._read(state.highWaterMark);
state.sync = false;
+ // If _read pushed data synchronously, then `reading` will be false,
+ // and we need to re-evaluate how much data we can return to the user.
+ if (!state.reading) n = howMuchToRead(nOrig, state);
}
- // If _read pushed data synchronously, then `reading` will be false,
- // and we need to re-evaluate how much data we can return to the user.
- if (doRead && !state.reading) n = howMuchToRead(nOrig, state);
-
var ret;
if (n > 0) ret = fromList(n, state);else ret = null;
if (ret === null) {
state.needReadable = true;
n = 0;
+ } else {
+ state.length -= n;
}
- state.length -= n;
+ if (state.length === 0) {
+ // If we have nothing in the buffer, then we want to know
+ // as soon as we *do* get something into the buffer.
+ if (!state.ended) state.needReadable = true;
- // If we have nothing in the buffer, then we want to know
- // as soon as we *do* get something into the buffer.
- if (state.length === 0 && !state.ended) state.needReadable = true;
-
- // If we tried to read() past the EOF, then emit end on the next tick.
- if (nOrig !== n && state.ended && state.length === 0) endReadable(this);
+ // If we tried to read() past the EOF, then emit end on the next tick.
+ if (nOrig !== n && state.ended) endReadable(this);
+ }
if (ret !== null) this.emit('data', ret);
@@ -456,7 +460,7 @@ function maybeReadMore_(stream, state) {
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function (n) {
- this.emit('error', new Error('not implemented'));
+ this.emit('error', new Error('_read() is not implemented'));
};
Readable.prototype.pipe = function (dest, pipeOpts) {
@@ -525,17 +529,25 @@ Readable.prototype.pipe = function (dest, pipeOpts) {
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
}
+ // If the user pushes more data while we're writing to dest then we'll end up
+ // in ondata again. However, we only want to increase awaitDrain once because
+ // dest will only emit one 'drain' event for the multiple writes.
+ // => Introduce a guard on increasing awaitDrain.
+ var increasedAwaitDrain = false;
src.on('data', ondata);
function ondata(chunk) {
debug('ondata');
+ increasedAwaitDrain = false;
var ret = dest.write(chunk);
- if (false === ret) {
+ if (false === ret && !increasedAwaitDrain) {
// If the user unpiped during `dest.write()`, it is possible
// to get stuck in a permanently paused state if that write
// also returned false.
- if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) {
+ // => Check whether `dest` is still a piping destination.
+ if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
debug('false write response, pause', src._readableState.awaitDrain);
src._readableState.awaitDrain++;
+ increasedAwaitDrain = true;
}
src.pause();
}
@@ -549,9 +561,9 @@ Readable.prototype.pipe = function (dest, pipeOpts) {
dest.removeListener('error', onerror);
if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
}
- // This is a brutally ugly hack to make sure that our error handler
- // is attached before any userland ones. NEVER DO THIS.
- if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error];
+
+ // Make sure our error handler is attached before userland ones.
+ prependListener(dest, 'error', onerror);
// Both close and finish should trigger unpipe, but only once.
function onclose() {
@@ -626,16 +638,16 @@ Readable.prototype.unpipe = function (dest) {
state.pipesCount = 0;
state.flowing = false;
- for (var _i = 0; _i < len; _i++) {
- dests[_i].emit('unpipe', this);
+ for (var i = 0; i < len; i++) {
+ dests[i].emit('unpipe', this);
}return this;
}
// try to find the right one.
- var i = indexOf(state.pipes, dest);
- if (i === -1) return this;
+ var index = indexOf(state.pipes, dest);
+ if (index === -1) return this;
- state.pipes.splice(i, 1);
+ state.pipes.splice(index, 1);
state.pipesCount -= 1;
if (state.pipesCount === 1) state.pipes = state.pipes[0];
@@ -649,18 +661,14 @@ Readable.prototype.unpipe = function (dest) {
Readable.prototype.on = function (ev, fn) {
var res = Stream.prototype.on.call(this, ev, fn);
- // If listening to data, and it has not explicitly been paused,
- // then call resume to start the flow of data on the next tick.
- if (ev === 'data' && false !== this._readableState.flowing) {
- this.resume();
- }
-
- if (ev === 'readable' && !this._readableState.endEmitted) {
+ if (ev === 'data') {
+ // Start flowing on next tick if stream isn't explicitly paused
+ if (this._readableState.flowing !== false) this.resume();
+ } else if (ev === 'readable') {
var state = this._readableState;
- if (!state.readableListening) {
- state.readableListening = true;
+ if (!state.endEmitted && !state.readableListening) {
+ state.readableListening = state.needReadable = true;
state.emittedReadable = false;
- state.needReadable = true;
if (!state.reading) {
processNextTick(nReadingNextTick, this);
} else if (state.length) {
@@ -704,6 +712,7 @@ function resume_(stream, state) {
}
state.resumeScheduled = false;
+ state.awaitDrain = 0;
stream.emit('resume');
flow(stream);
if (state.flowing && !state.reading) stream.read(0);
@@ -722,11 +731,7 @@ Readable.prototype.pause = function () {
function flow(stream) {
var state = stream._readableState;
debug('flow', state.flowing);
- if (state.flowing) {
- do {
- var chunk = stream.read();
- } while (null !== chunk && state.flowing);
- }
+ while (state.flowing && stream.read() !== null) {}
}
// wrap an old-style stream as the async data source.
@@ -774,10 +779,9 @@ Readable.prototype.wrap = function (stream) {
}
// proxy certain important events.
- var events = ['error', 'close', 'destroy', 'pause', 'resume'];
- forEach(events, function (ev) {
- stream.on(ev, self.emit.bind(self, ev));
- });
+ for (var n = 0; n < kProxyEvents.length; n++) {
+ stream.on(kProxyEvents[n], self.emit.bind(self, kProxyEvents[n]));
+ }
// when we try to consume some more bytes, simply unpause the
// underlying stream.
@@ -797,50 +801,101 @@ Readable._fromList = fromList;
// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
function fromList(n, state) {
- var list = state.buffer;
- var length = state.length;
- var stringMode = !!state.decoder;
- var objectMode = !!state.objectMode;
- var ret;
+ // nothing buffered
+ if (state.length === 0) return null;
- // nothing in the list, definitely empty.
- if (list.length === 0) return null;
-
- if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) {
- // read it all, truncate the array.
- if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length);
- list.length = 0;
+ var ret;
+ if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
+ // read it all, truncate the list
+ if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
+ state.buffer.clear();
} else {
- // read just some of it.
- if (n < list[0].length) {
- // just take a part of the first list item.
- // slice is the same for buffers and strings.
- var buf = list[0];
- ret = buf.slice(0, n);
- list[0] = buf.slice(n);
- } else if (n === list[0].length) {
- // first list is a perfect match
- ret = list.shift();
- } else {
- // complex case.
- // we have enough to cover it, but it spans past the first buffer.
- if (stringMode) ret = '';else ret = new Buffer(n);
-
- var c = 0;
- for (var i = 0, l = list.length; i < l && c < n; i++) {
- var buf = list[0];
- var cpy = Math.min(n - c, buf.length);
+ // read part of list
+ ret = fromListPartial(n, state.buffer, state.decoder);
+ }
- if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy);
+ return ret;
+}
- if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift();
+// Extracts only enough buffered data to satisfy the amount requested.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function fromListPartial(n, list, hasStrings) {
+ var ret;
+ if (n < list.head.data.length) {
+ // slice is the same for buffers and strings
+ ret = list.head.data.slice(0, n);
+ list.head.data = list.head.data.slice(n);
+ } else if (n === list.head.data.length) {
+ // first chunk is a perfect match
+ ret = list.shift();
+ } else {
+ // result spans more than one buffer
+ ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
+ }
+ return ret;
+}
- c += cpy;
+// Copies a specified amount of characters from the list of buffered data
+// chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBufferString(n, list) {
+ var p = list.head;
+ var c = 1;
+ var ret = p.data;
+ n -= ret.length;
+ while (p = p.next) {
+ var str = p.data;
+ var nb = n > str.length ? str.length : n;
+ if (nb === str.length) ret += str;else ret += str.slice(0, n);
+ n -= nb;
+ if (n === 0) {
+ if (nb === str.length) {
+ ++c;
+ if (p.next) list.head = p.next;else list.head = list.tail = null;
+ } else {
+ list.head = p;
+ p.data = str.slice(nb);
}
+ break;
}
+ ++c;
}
+ list.length -= c;
+ return ret;
+}
+// Copies a specified amount of bytes from the list of buffered data chunks.
+// This function is designed to be inlinable, so please take care when making
+// changes to the function body.
+function copyFromBuffer(n, list) {
+ var ret = bufferShim.allocUnsafe(n);
+ var p = list.head;
+ var c = 1;
+ p.data.copy(ret);
+ n -= p.data.length;
+ while (p = p.next) {
+ var buf = p.data;
+ var nb = n > buf.length ? buf.length : n;
+ buf.copy(ret, ret.length - n, 0, nb);
+ n -= nb;
+ if (n === 0) {
+ if (nb === buf.length) {
+ ++c;
+ if (p.next) list.head = p.next;else list.head = list.tail = null;
+ } else {
+ list.head = p;
+ p.data = buf.slice(nb);
+ }
+ break;
+ }
+ ++c;
+ }
+ list.length -= c;
return ret;
}
@@ -849,7 +904,7 @@ function endReadable(stream) {
// If we get here before consuming all the bytes, then that is a
// bug in node. Should never happen.
- if (state.length > 0) throw new Error('endReadable called on non-empty stream');
+ if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
if (!state.endEmitted) {
state.ended = true;
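
The prependListener() helper introduced in the hunk above exists so that pipe() can attach its internal 'error' handler ahead of userland ones, with a fallback for Node versions that lack the method. A minimal sketch of the observable ordering, using a plain EventEmitter rather than the vendored code:

```js
const EventEmitter = require('events');

const ee = new EventEmitter();
ee.on('error', () => console.log('userland handler'));
// prependListener (Node >= 6) puts a listener at the front of the queue,
// even though it was registered later.
ee.prependListener('error', () => console.log('internal handler runs first'));
ee.emit('error', new Error('boom'));
// => internal handler runs first
// => userland handler
```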
diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js
index 625cdc176..cd2583207 100644
--- a/node_modules/readable-stream/lib/_stream_transform.js
+++ b/node_modules/readable-stream/lib/_stream_transform.js
@@ -94,7 +94,6 @@ function Transform(options) {
this._transformState = new TransformState(this);
- // when the writable side finishes, then flush out anything remaining.
var stream = this;
// start out asking for a readable event once data is transformed.
@@ -111,9 +110,10 @@ function Transform(options) {
if (typeof options.flush === 'function') this._flush = options.flush;
}
+ // When the writable side finishes, then flush out anything remaining.
this.once('prefinish', function () {
- if (typeof this._flush === 'function') this._flush(function (er) {
- done(stream, er);
+ if (typeof this._flush === 'function') this._flush(function (er, data) {
+ done(stream, er, data);
});else done(stream);
});
}
@@ -134,7 +134,7 @@ Transform.prototype.push = function (chunk, encoding) {
// an error, then that'll put the hurt on the whole operation. If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function (chunk, encoding, cb) {
- throw new Error('not implemented');
+ throw new Error('_transform() is not implemented');
};
Transform.prototype._write = function (chunk, encoding, cb) {
@@ -164,17 +164,19 @@ Transform.prototype._read = function (n) {
}
};
-function done(stream, er) {
+function done(stream, er, data) {
if (er) return stream.emit('error', er);
+ if (data !== null && data !== undefined) stream.push(data);
+
// if there's nothing in the write buffer, then that means
// that nothing more will ever be provided
var ws = stream._writableState;
var ts = stream._transformState;
- if (ws.length) throw new Error('calling transform done when ws.length != 0');
+ if (ws.length) throw new Error('Calling transform done when ws.length != 0');
- if (ts.transforming) throw new Error('calling transform done when still transforming');
+ if (ts.transforming) throw new Error('Calling transform done when still transforming');
return stream.push(null);
}
\ No newline at end of file
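
The change to done(stream, er, data) above means a transform's flush callback can hand back one final chunk, which the stream pushes on its behalf; a minimal sketch:

```js
const { Transform } = require('readable-stream');

// Swallows all input and emits a single summary line at end-of-stream.
const counter = new Transform({
  transform(chunk, encoding, callback) {
    this.bytes = (this.bytes || 0) + chunk.length;
    callback(); // consume the chunk without forwarding it
  },
  flush(callback) {
    // The data argument is pushed for us by done(stream, er, data).
    callback(null, 'total bytes: ' + (this.bytes || 0) + '\n');
  },
});

process.stdin.pipe(counter).pipe(process.stdout);
```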
diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js
index 95916c992..15db03868 100644
--- a/node_modules/readable-stream/lib/_stream_writable.js
+++ b/node_modules/readable-stream/lib/_stream_writable.js
@@ -15,7 +15,7 @@ var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.
/*</replacement>*/
/*<replacement>*/
-var Buffer = require('buffer').Buffer;
+var Duplex;
/*</replacement>*/
Writable.WritableState = WritableState;
@@ -32,17 +32,13 @@ var internalUtil = {
/*</replacement>*/
/*<replacement>*/
-var Stream;
-(function () {
- try {
- Stream = require('st' + 'ream');
- } catch (_) {} finally {
- if (!Stream) Stream = require('events').EventEmitter;
- }
-})();
+var Stream = require('./internal/streams/stream');
/*</replacement>*/
var Buffer = require('buffer').Buffer;
+/*<replacement>*/
+var bufferShim = require('buffer-shims');
+/*</replacement>*/
util.inherits(Writable, Stream);
@@ -55,7 +51,6 @@ function WriteReq(chunk, encoding, cb) {
this.next = null;
}
-var Duplex;
function WritableState(options, stream) {
Duplex = Duplex || require('./_stream_duplex');
@@ -75,8 +70,9 @@ function WritableState(options, stream) {
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
// cast to ints.
- this.highWaterMark = ~ ~this.highWaterMark;
+ this.highWaterMark = ~~this.highWaterMark;
+ // drain event flag.
this.needDrain = false;
// at the start of calling end()
this.ending = false;
@@ -146,13 +142,12 @@ function WritableState(options, stream) {
// count buffered requests
this.bufferedRequestCount = 0;
- // create the two objects needed to store the corked requests
- // they are not a linked list, as no new elements are inserted in there
+ // allocate the first CorkedRequest, there is always
+ // one allocated and free to use, and we maintain at most two
this.corkedRequestsFree = new CorkedRequest(this);
- this.corkedRequestsFree.next = new CorkedRequest(this);
}
-WritableState.prototype.getBuffer = function writableStateGetBuffer() {
+WritableState.prototype.getBuffer = function getBuffer() {
var current = this.bufferedRequest;
var out = [];
while (current) {
@@ -172,13 +167,37 @@ WritableState.prototype.getBuffer = function writableStateGetBuffer() {
} catch (_) {}
})();
-var Duplex;
+// Test _writableState for inheritance to account for Duplex streams,
+// whose prototype chain only points to Readable.
+var realHasInstance;
+if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
+ realHasInstance = Function.prototype[Symbol.hasInstance];
+ Object.defineProperty(Writable, Symbol.hasInstance, {
+ value: function (object) {
+ if (realHasInstance.call(this, object)) return true;
+
+ return object && object._writableState instanceof WritableState;
+ }
+ });
+} else {
+ realHasInstance = function (object) {
+ return object instanceof this;
+ };
+}
+
function Writable(options) {
Duplex = Duplex || require('./_stream_duplex');
- // Writable ctor is applied to Duplexes, though they're not
- // instanceof Writable, they're instanceof Readable.
- if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options);
+ // Writable ctor is applied to Duplexes, too.
+ // `realHasInstance` is necessary because using plain `instanceof`
+ // would return false, as no `_writableState` property is attached.
+
+ // Trying to use the custom `instanceof` for Writable here will also break the
+ // Node.js LazyTransform implementation, which has a non-trivial getter for
+ // `_writableState` that would lead to infinite recursion.
+ if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
+ return new Writable(options);
+ }
this._writableState = new WritableState(options, this);
@@ -196,7 +215,7 @@ function Writable(options) {
// Otherwise people can pipe Writable streams, which is just wrong.
Writable.prototype.pipe = function () {
- this.emit('error', new Error('Cannot pipe. Not readable.'));
+ this.emit('error', new Error('Cannot pipe, not readable'));
};
function writeAfterEnd(stream, cb) {
@@ -206,16 +225,19 @@ function writeAfterEnd(stream, cb) {
processNextTick(cb, er);
}
-// If we get something that is not a buffer, string, null, or undefined,
-// and we're not in objectMode, then that's an error.
-// Otherwise stream chunks are all considered to be of length=1, and the
-// watermarks determine how many objects to keep in the buffer, rather than
-// how many bytes or characters.
+// Checks that a user-supplied chunk is valid, especially for the particular
+// mode the stream is in. Currently this means that `null` is never accepted
+// and undefined/non-string values are only allowed in object mode.
function validChunk(stream, state, chunk, cb) {
var valid = true;
+ var er = false;
- if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
- var er = new TypeError('Invalid non-string/buffer chunk');
+ if (chunk === null) {
+ er = new TypeError('May not write null values to stream');
+ } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
+ er = new TypeError('Invalid non-string/buffer chunk');
+ }
+ if (er) {
stream.emit('error', er);
processNextTick(cb, er);
valid = false;
@@ -226,19 +248,20 @@ function validChunk(stream, state, chunk, cb) {
Writable.prototype.write = function (chunk, encoding, cb) {
var state = this._writableState;
var ret = false;
+ var isBuf = Buffer.isBuffer(chunk);
if (typeof encoding === 'function') {
cb = encoding;
encoding = null;
}
- if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
+ if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
if (typeof cb !== 'function') cb = nop;
- if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) {
+ if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
state.pendingcb++;
- ret = writeOrBuffer(this, state, chunk, encoding, cb);
+ ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
}
return ret;
@@ -265,11 +288,12 @@ Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
this._writableState.defaultEncoding = encoding;
+ return this;
};
function decodeChunk(state, chunk, encoding) {
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
- chunk = new Buffer(chunk, encoding);
+ chunk = bufferShim.from(chunk, encoding);
}
return chunk;
}
@@ -277,10 +301,11 @@ function decodeChunk(state, chunk, encoding) {
// if we're already writing something, then just put this
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
-function writeOrBuffer(stream, state, chunk, encoding, cb) {
- chunk = decodeChunk(state, chunk, encoding);
-
- if (Buffer.isBuffer(chunk)) encoding = 'buffer';
+function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
+ if (!isBuf) {
+ chunk = decodeChunk(state, chunk, encoding);
+ if (Buffer.isBuffer(chunk)) encoding = 'buffer';
+ }
var len = state.objectMode ? 1 : chunk.length;
state.length += len;
@@ -349,8 +374,8 @@ function onwrite(stream, er) {
asyncWrite(afterWrite, stream, state, finished, cb);
/*</replacement>*/
} else {
- afterWrite(stream, state, finished, cb);
- }
+ afterWrite(stream, state, finished, cb);
+ }
}
}
@@ -392,12 +417,16 @@ function clearBuffer(stream, state) {
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
- // doWrite is always async, defer these to save a bit of time
+ // doWrite is almost always async, defer these to save a bit of time
// as the hot path ends with doWrite
state.pendingcb++;
state.lastBufferedRequest = null;
- state.corkedRequestsFree = holder.next;
- holder.next = null;
+ if (holder.next) {
+ state.corkedRequestsFree = holder.next;
+ holder.next = null;
+ } else {
+ state.corkedRequestsFree = new CorkedRequest(state);
+ }
} else {
// Slow case, write chunks one-by-one
while (entry) {
@@ -426,7 +455,7 @@ function clearBuffer(stream, state) {
}
Writable.prototype._write = function (chunk, encoding, cb) {
- cb(new Error('not implemented'));
+ cb(new Error('_write() is not implemented'));
};
Writable.prototype._writev = null;
@@ -497,7 +526,6 @@ function CorkedRequest(state) {
this.next = null;
this.entry = null;
-
this.finish = function (err) {
var entry = _this.entry;
_this.entry = null;
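
The Symbol.hasInstance hook added above makes instanceof checks recognize Duplex streams (whose prototype chain only extends Readable) as Writable by looking for a _writableState. A small sketch of the observable effect, assuming an engine that supports Symbol.hasInstance:

```js
const { Writable, PassThrough } = require('readable-stream');

const pass = new PassThrough();

// Without the hook, a plain prototype check would report false, because
// Duplex (and thus PassThrough) inherits from Readable, not Writable.
console.log(pass instanceof Writable); // true on engines with Symbol.hasInstance
```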
diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json
index d77b090ec..f4ab1a9a4 100644
--- a/node_modules/readable-stream/package.json
+++ b/node_modules/readable-stream/package.json
@@ -1,25 +1,33 @@
{
"name": "readable-stream",
- "version": "2.0.6",
+ "version": "2.2.9",
"description": "Streams3, a user-land copy of the stream library from Node.js",
"main": "readable.js",
"dependencies": {
+ "buffer-shims": "~1.0.0",
"core-util-is": "~1.0.0",
- "inherits": "~2.0.1",
"isarray": "~1.0.0",
+ "inherits": "~2.0.1",
"process-nextick-args": "~1.0.6",
- "string_decoder": "~0.10.x",
+ "string_decoder": "~1.0.0",
"util-deprecate": "~1.0.1"
},
"devDependencies": {
- "tap": "~0.2.6",
+ "assert": "~1.4.0",
+ "babel-polyfill": "^6.9.1",
+ "buffer": "^4.9.0",
+ "nyc": "^6.4.0",
+ "tap": "~0.7.1",
"tape": "~4.5.1",
- "zuul": "~3.9.0"
+ "zuul": "~3.10.0"
},
"scripts": {
"test": "tap test/parallel/*.js test/ours/*.js",
- "browser": "npm run write-zuul && zuul -- test/browser.js",
- "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml"
+ "browser": "npm run write-zuul && zuul --browser-retries 2 -- test/browser.js",
+ "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml",
+ "local": "zuul --local 3000 --no-coverage -- test/browser.js",
+ "cover": "nyc npm test",
+ "report": "nyc report --reporter=lcov"
},
"repository": {
"type": "git",
@@ -31,7 +39,16 @@
"pipe"
],
"browser": {
- "util": false
+ "util": false,
+ "./readable.js": "./readable-browser.js",
+ "./writable.js": "./writable-browser.js",
+ "./duplex.js": "./duplex-browser.js",
+ "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
+ },
+ "nyc": {
+ "include": [
+ "lib/**.js"
+ ]
},
"license": "MIT"
}
diff --git a/node_modules/readable-stream/passthrough.js b/node_modules/readable-stream/passthrough.js
index 27e8d8a55..ffd791d7f 100644
--- a/node_modules/readable-stream/passthrough.js
+++ b/node_modules/readable-stream/passthrough.js
@@ -1 +1 @@
-module.exports = require("./lib/_stream_passthrough.js")
+module.exports = require('./readable').PassThrough
diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js
index 6222a5798..ec89ec533 100644
--- a/node_modules/readable-stream/readable.js
+++ b/node_modules/readable-stream/readable.js
@@ -1,12 +1,19 @@
-var Stream = (function (){
- try {
- return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify
- } catch(_){}
-}());
-exports = module.exports = require('./lib/_stream_readable.js');
-exports.Stream = Stream || exports;
-exports.Readable = exports;
-exports.Writable = require('./lib/_stream_writable.js');
-exports.Duplex = require('./lib/_stream_duplex.js');
-exports.Transform = require('./lib/_stream_transform.js');
-exports.PassThrough = require('./lib/_stream_passthrough.js');
+var Stream = require('stream');
+if (process.env.READABLE_STREAM === 'disable' && Stream) {
+ module.exports = Stream;
+ exports = module.exports = Stream.Readable;
+ exports.Readable = Stream.Readable;
+ exports.Writable = Stream.Writable;
+ exports.Duplex = Stream.Duplex;
+ exports.Transform = Stream.Transform;
+ exports.PassThrough = Stream.PassThrough;
+ exports.Stream = Stream;
+} else {
+ exports = module.exports = require('./lib/_stream_readable.js');
+ exports.Stream = Stream || exports;
+ exports.Readable = exports;
+ exports.Writable = require('./lib/_stream_writable.js');
+ exports.Duplex = require('./lib/_stream_duplex.js');
+ exports.Transform = require('./lib/_stream_transform.js');
+ exports.PassThrough = require('./lib/_stream_passthrough.js');
+}
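
The new readable.js above lets consumers opt back into Node core streams at runtime by setting READABLE_STREAM=disable in the environment (e.g. `READABLE_STREAM=disable node app.js`); a minimal sketch of how to observe the switch:

```js
const rs = require('readable-stream');
const core = require('stream');

// true when run with READABLE_STREAM=disable (core streams are re-exported),
// false otherwise (the userland copy is used).
console.log(rs.Readable === core.Readable);
```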
diff --git a/node_modules/readable-stream/transform.js b/node_modules/readable-stream/transform.js
index 5d482f078..b1baba26d 100644
--- a/node_modules/readable-stream/transform.js
+++ b/node_modules/readable-stream/transform.js
@@ -1 +1 @@
-module.exports = require("./lib/_stream_transform.js")
+module.exports = require('./readable').Transform
diff --git a/node_modules/readable-stream/writable.js b/node_modules/readable-stream/writable.js
index e1e9efdf3..634ddcbe1 100644
--- a/node_modules/readable-stream/writable.js
+++ b/node_modules/readable-stream/writable.js
@@ -1 +1,8 @@
-module.exports = require("./lib/_stream_writable.js")
+var Stream = require("stream")
+var Writable = require("./lib/_stream_writable.js")
+
+if (process.env.READABLE_STREAM === 'disable') {
+ module.exports = Stream && Stream.Writable || Writable
+}
+
+module.exports = Writable