From a851c3fb33335fb42de8280c6bad2a6699bb746f Mon Sep 17 00:00:00 2001 From: Florian Dold Date: Sat, 26 Nov 2016 17:04:35 +0100 Subject: update node_modules --- .../bl/node_modules/readable-stream/.npmignore | 5 - .../bl/node_modules/readable-stream/.travis.yml | 52 - .../bl/node_modules/readable-stream/.zuul.yml | 1 - .../bl/node_modules/readable-stream/LICENSE | 18 - .../bl/node_modules/readable-stream/README.md | 36 - .../readable-stream/doc/stream.markdown | 1760 -------------------- .../readable-stream/doc/wg-meetings/2015-01-30.md | 60 - .../bl/node_modules/readable-stream/duplex.js | 1 - .../readable-stream/lib/_stream_duplex.js | 75 - .../readable-stream/lib/_stream_passthrough.js | 26 - .../readable-stream/lib/_stream_readable.js | 880 ---------- .../readable-stream/lib/_stream_transform.js | 180 -- .../readable-stream/lib/_stream_writable.js | 516 ------ .../bl/node_modules/readable-stream/package.json | 37 - .../bl/node_modules/readable-stream/passthrough.js | 1 - .../bl/node_modules/readable-stream/readable.js | 12 - .../bl/node_modules/readable-stream/transform.js | 1 - .../bl/node_modules/readable-stream/writable.js | 1 - 18 files changed, 3662 deletions(-) delete mode 100644 node_modules/bl/node_modules/readable-stream/.npmignore delete mode 100644 node_modules/bl/node_modules/readable-stream/.travis.yml delete mode 100644 node_modules/bl/node_modules/readable-stream/.zuul.yml delete mode 100644 node_modules/bl/node_modules/readable-stream/LICENSE delete mode 100644 node_modules/bl/node_modules/readable-stream/README.md delete mode 100644 node_modules/bl/node_modules/readable-stream/doc/stream.markdown delete mode 100644 node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md delete mode 100644 node_modules/bl/node_modules/readable-stream/duplex.js delete mode 100644 node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js delete mode 100644 node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js delete mode 100644 node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js delete mode 100644 node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js delete mode 100644 node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js delete mode 100644 node_modules/bl/node_modules/readable-stream/package.json delete mode 100644 node_modules/bl/node_modules/readable-stream/passthrough.js delete mode 100644 node_modules/bl/node_modules/readable-stream/readable.js delete mode 100644 node_modules/bl/node_modules/readable-stream/transform.js delete mode 100644 node_modules/bl/node_modules/readable-stream/writable.js (limited to 'node_modules/bl') diff --git a/node_modules/bl/node_modules/readable-stream/.npmignore b/node_modules/bl/node_modules/readable-stream/.npmignore deleted file mode 100644 index 38344f87a..000000000 --- a/node_modules/bl/node_modules/readable-stream/.npmignore +++ /dev/null @@ -1,5 +0,0 @@ -build/ -test/ -examples/ -fs.js -zlib.js \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/.travis.yml b/node_modules/bl/node_modules/readable-stream/.travis.yml deleted file mode 100644 index 1b8211846..000000000 --- a/node_modules/bl/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,52 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - npm install -g npm -notifications: - email: false -matrix: - fast_finish: true - allow_failures: - - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - env: TASK=browser 
BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - include: - - node_js: '0.8' - env: TASK=test - - node_js: '0.10' - env: TASK=test - - node_js: '0.11' - env: TASK=test - - node_js: '0.12' - env: TASK=test - - node_js: 1 - env: TASK=test - - node_js: 2 - env: TASK=test - - node_js: 3 - env: TASK=test - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 5 - env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ipad BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=iphone BROWSER_VERSION="6.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" -script: "npm run $TASK" -env: - global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/bl/node_modules/readable-stream/.zuul.yml b/node_modules/bl/node_modules/readable-stream/.zuul.yml deleted file mode 100644 index 96d9cfbd3..000000000 --- a/node_modules/bl/node_modules/readable-stream/.zuul.yml +++ /dev/null @@ -1 +0,0 @@ -ui: tape diff --git a/node_modules/bl/node_modules/readable-stream/LICENSE b/node_modules/bl/node_modules/readable-stream/LICENSE deleted file mode 100644 index e3d4e695a..000000000 --- a/node_modules/bl/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. 
diff --git a/node_modules/bl/node_modules/readable-stream/README.md b/node_modules/bl/node_modules/readable-stream/README.md deleted file mode 100644 index 86b95a3bf..000000000 --- a/node_modules/bl/node_modules/readable-stream/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# readable-stream - -***Node-core v5.8.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and Streams3 implementations in -Node-core, including [documentation](doc/stream.markdown). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. - -# Streams WG Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/node_modules/bl/node_modules/readable-stream/doc/stream.markdown b/node_modules/bl/node_modules/readable-stream/doc/stream.markdown deleted file mode 100644 index 0bc3819e6..000000000 --- a/node_modules/bl/node_modules/readable-stream/doc/stream.markdown +++ /dev/null @@ -1,1760 +0,0 @@ -# Stream - - Stability: 2 - Stable - -A stream is an abstract interface implemented by various objects in -Node.js. For example a [request to an HTTP server][http-incoming-message] is a -stream, as is [`process.stdout`][]. Streams are readable, writable, or both. All -streams are instances of [`EventEmitter`][]. - -You can load the Stream base classes by doing `require('stream')`. -There are base classes provided for [Readable][] streams, [Writable][] -streams, [Duplex][] streams, and [Transform][] streams. - -This document is split up into 3 sections: - -1. The first section explains the parts of the API that you need to be - aware of to use streams in your programs. -2. The second section explains the parts of the API that you need to - use if you implement your own custom streams yourself. The API is designed to - make this easy for you to do. -3. 
The third section goes into more depth about how streams work,
-including some of the internal mechanisms and functions that you
-should probably not modify unless you definitely know what you are
-doing.
-
-
-## API for Stream Consumers
-
-Streams can be either [Readable][], [Writable][], or both ([Duplex][]).
-
-All streams are EventEmitters, but they also have other custom methods
-and properties depending on whether they are Readable, Writable, or
-Duplex.
-
-If a stream is both Readable and Writable, then it implements all of
-the methods and events. So, a [Duplex][] or [Transform][] stream is
-fully described by this API, though their implementation may be
-somewhat different.
-
-It is not necessary to implement Stream interfaces in order to consume
-streams in your programs. If you **are** implementing streaming
-interfaces in your own program, please also refer to
-[API for Stream Implementors][].
-
-Almost all Node.js programs, no matter how simple, use Streams in some
-way. Here is an example of using Streams in a Node.js program:
-
-```js
-const http = require('http');
-
-var server = http.createServer((req, res) => {
-  // req is an http.IncomingMessage, which is a Readable Stream
-  // res is an http.ServerResponse, which is a Writable Stream
-
-  var body = '';
-  // we want to get the data as utf8 strings
-  // If you don't set an encoding, then you'll get Buffer objects
-  req.setEncoding('utf8');
-
-  // Readable streams emit 'data' events once a listener is added
-  req.on('data', (chunk) => {
-    body += chunk;
-  });
-
-  // the 'end' event tells you that you have the entire body
-  req.on('end', () => {
-    try {
-      var data = JSON.parse(body);
-    } catch (er) {
-      // uh oh! bad json!
-      res.statusCode = 400;
-      return res.end(`error: ${er.message}`);
-    }
-
-    // write back something interesting to the user:
-    res.write(typeof data);
-    res.end();
-  });
-});
-
-server.listen(1337);
-
-// $ curl localhost:1337 -d '{}'
-// object
-// $ curl localhost:1337 -d '"foo"'
-// string
-// $ curl localhost:1337 -d 'not json'
-// error: Unexpected token o
-```
-
-### Class: stream.Duplex
-
-Duplex streams are streams that implement both the [Readable][] and
-[Writable][] interfaces.
-
-Examples of Duplex streams include:
-
-* [TCP sockets][]
-* [zlib streams][zlib]
-* [crypto streams][crypto]
-
-### Class: stream.Readable
-
-The Readable stream interface is the abstraction for a *source* of
-data that you are reading from. In other words, data comes *out* of a
-Readable stream.
-
-A Readable stream will not start emitting data until you indicate that
-you are ready to receive it.
-
-Readable streams have two "modes": a **flowing mode** and a **paused
-mode**. When in flowing mode, data is read from the underlying system
-and provided to your program as fast as possible. In paused mode, you
-must explicitly call [`stream.read()`][stream-read] to get chunks of data out.
-Streams start out in paused mode.
-
-**Note**: If no `'data'` event handlers are attached, and there are no
-[`stream.pipe()`][] destinations, and the stream is switched into flowing
-mode, then data will be lost.
-
-You can switch to flowing mode by doing any of the following:
-
-* Adding a [`'data'`][] event handler to listen for data.
-* Calling the [`stream.resume()`][stream-resume] method to explicitly open the
-  flow.
-* Calling the [`stream.pipe()`][] method to send the data to a [Writable][].
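-For example, the `resume()` route looks like this (a minimal sketch;
-`getReadableStreamSomehow()` is a placeholder, as in the other examples
-in this document):
-
-```js
-var readable = getReadableStreamSomehow();
-// No 'data' handler and no pipe destination are attached, so the
-// stream starts out paused; resume() switches it into flowing mode.
-readable.resume();
-```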
- -You can switch back to paused mode by doing either of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -Note that, for backwards compatibility reasons, removing [`'data'`][] -event handlers will **not** automatically pause the stream. Also, if -there are piped destinations, then calling [`stream.pause()`][stream-pause] will -not guarantee that the stream will *remain* paused once those -destinations drain and ask for more data. - -Examples of readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -#### Event: 'close' - -Emitted when the stream and any of its underlying resources (a file -descriptor, for example) have been closed. The event indicates that -no more events will be emitted, and no further computation will occur. - -Not all streams will emit the `'close'` event. - -#### Event: 'data' - -* `chunk` {Buffer|String} The chunk of data. - -Attaching a `'data'` event listener to a stream that has not been -explicitly paused will switch the stream into flowing mode. Data will -then be passed as soon as it is available. - -If you just want to get all the data out of the stream as fast as -possible, this is the best way to do so. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -``` - -#### Event: 'end' - -This event fires when there will be no more data to read. - -Note that the `'end'` event **will not fire** unless the data is -completely consumed. This can be done by switching into flowing mode, -or by calling [`stream.read()`][stream-read] repeatedly until you get to the -end. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); -}); -readable.on('end', () => { - console.log('there will be no more data.'); -}); -``` - -#### Event: 'error' - -* {Error Object} - -Emitted if there was an error receiving data. - -#### Event: 'readable' - -When a chunk of data can be read from the stream, it will emit a -`'readable'` event. - -In some cases, listening for a `'readable'` event will cause some data -to be read into the internal buffer from the underlying system, if it -hadn't already. - -```javascript -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` - -Once the internal buffer is drained, a `'readable'` event will fire -again when more data is available. - -The `'readable'` event is not emitted in the "flowing" mode with the -sole exception of the last one, on end-of-stream. - -The `'readable'` event indicates that the stream has new information: -either new data is available or the end of the stream has been reached. -In the former case, [`stream.read()`][stream-read] will return that data. In the -latter case, [`stream.read()`][stream-read] will return null. 
For instance, in -the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -var rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -#### readable.isPaused() - -* Return: {Boolean} - -This method returns whether or not the `readable` has been **explicitly** -paused by client code (using [`stream.pause()`][stream-pause] without a -corresponding [`stream.resume()`][stream-resume]). - -```js -var readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -#### readable.pause() - -* Return: `this` - -This method will cause a stream in flowing mode to stop emitting -[`'data'`][] events, switching out of flowing mode. Any data that becomes -available will remain in the internal buffer. - -```js -var readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log('got %d bytes of data', chunk.length); - readable.pause(); - console.log('there will be no more data for 1 second'); - setTimeout(() => { - console.log('now data will start flowing again'); - readable.resume(); - }, 1000); -}); -``` - -#### readable.pipe(destination[, options]) - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Default = `true` - -This method pulls all the data out of a readable stream, and writes it -to the supplied destination, automatically managing the flow so that -the destination is not overwhelmed by a fast readable stream. - -Multiple destinations can be piped to safely. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` - -This function returns the destination stream, so you can set up pipe -chains like so: - -```js -var r = fs.createReadStream('file.txt'); -var z = zlib.createGzip(); -var w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -For example, emulating the Unix `cat` command: - -```js -process.stdin.pipe(process.stdout); -``` - -By default [`stream.end()`][stream-end] is called on the destination when the -source stream emits [`'end'`][], so that `destination` is no longer writable. -Pass `{ end: false }` as `options` to keep the destination stream open. - -This keeps `writer` open so that "Goodbye" can be written at the -end. - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -Note that [`process.stderr`][] and [`process.stdout`][] are never closed until -the process exits, regardless of the specified options. - -#### readable.read([size]) - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `read()` method pulls some data out of the internal buffer and -returns it. If there is no data available, then it will return -`null`. - -If you pass in a `size` argument, then it will return that many -bytes. If `size` bytes are not available, then it will return `null`, -unless we've ended, in which case it will return the data remaining -in the buffer. - -If you do not specify a `size` argument, then it will return all the -data in the internal buffer. 
- -This method should only be called in paused mode. In flowing mode, -this method is called automatically until the internal buffer is -drained. - -```js -var readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log('got %d bytes of data', chunk.length); - } -}); -``` - -If this method returns a data chunk, then it will also trigger the -emission of a [`'data'`][] event. - -Note that calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been triggered will return `null`. No runtime error will be raised. - -#### readable.resume() - -* Return: `this` - -This method will cause the readable stream to resume emitting [`'data'`][] -events. - -This method will switch the stream into flowing mode. If you do *not* -want to consume the data from a stream, but you *do* want to get to -its [`'end'`][] event, you can call [`stream.resume()`][stream-resume] to open -the flow of data. - -```js -var readable = getReadableStreamSomehow(); -readable.resume(); -readable.on('end', () => { - console.log('got to the end, but did not read anything'); -}); -``` - -#### readable.setEncoding(encoding) - -* `encoding` {String} The encoding to use. -* Return: `this` - -Call this function to cause the stream to return strings of the specified -encoding instead of Buffer objects. For example, if you do -`readable.setEncoding('utf8')`, then the output data will be interpreted as -UTF-8 data, and returned as strings. If you do `readable.setEncoding('hex')`, -then the data will be encoded in hexadecimal string format. - -This properly handles multi-byte characters that would otherwise be -potentially mangled if you simply pulled the Buffers directly and -called [`buf.toString(encoding)`][] on them. If you want to read the data -as strings, always use this method. - -Also you can disable any encoding at all with `readable.setEncoding(null)`. -This approach is very useful if you deal with binary data or with large -multi-byte strings spread out over multiple chunks. - -```js -var readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -#### readable.unpipe([destination]) - -* `destination` {stream.Writable} Optional specific stream to unpipe - -This method will remove the hooks set up for a previous [`stream.pipe()`][] -call. - -If the destination is not specified, then all pipes are removed. - -If the destination is specified, but no pipe is set up for it, then -this is a no-op. - -```js -var readable = getReadableStreamSomehow(); -var writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('stop writing to file.txt'); - readable.unpipe(writable); - console.log('manually close the file stream'); - writable.end(); -}, 1000); -``` - -#### readable.unshift(chunk) - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -This is useful in certain cases where a stream is being consumed by a -parser, which needs to "un-consume" some data that it has -optimistically pulled out of the source, so that the stream can be -passed on to some other party. - -Note that `stream.unshift(chunk)` cannot be called after the [`'end'`][] event -has been triggered; a runtime error will be raised. 
- -If you find that you must often call `stream.unshift(chunk)` in your -programs, consider implementing a [Transform][] stream instead. (See [API -for Stream Implementors][].) - -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - var decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - var remaining = split.join('\n\n'); - var buf = new Buffer(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. - callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -Note that, unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `unshift()` is called during a -read (i.e. from within a [`stream._read()`][stream-_read] implementation on a -custom stream). Following the call to `unshift()` with an immediate -[`stream.push('')`][stream-push] will reset the reading state appropriately, -however it is best to simply avoid calling `unshift()` while in the process of -performing a read. - -#### readable.wrap(stream) - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire Streams API as it is today. (See [Compatibility][] for -more information.) - -If you are using an older Node.js library that emits [`'data'`][] events and -has a [`stream.pause()`][stream-pause] method that is advisory only, then you -can use the `wrap()` method to create a [Readable][] stream that uses the old -stream as its data source. - -You will very rarely ever need to call this function, but it exists -as a convenience for interacting with old Node.js programs and libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Class: stream.Transform - -Transform streams are [Duplex][] streams where the output is in some way -computed from the input. They implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - -### Class: stream.Writable - - - -The Writable stream interface is an abstraction for a *destination* -that you are writing data *to*. 
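-In its simplest form, consuming a writable stream is a matter of calling
-`write()` and then `end()`. A minimal sketch (again using the
-`getWritableStreamSomehow()` placeholder):
-
-```js
-var writable = getWritableStreamSomehow();
-writable.write('some data');  // queue a chunk for the destination
-writable.end('last chunk\n'); // signal that no more data will be written
-```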
-
-Examples of writable streams include:
-
-* [HTTP requests, on the client][]
-* [HTTP responses, on the server][]
-* [fs write streams][]
-* [zlib streams][zlib]
-* [crypto streams][crypto]
-* [TCP sockets][]
-* [child process stdin][]
-* [`process.stdout`][], [`process.stderr`][]
-
-#### Event: 'drain'
-
-If a [`stream.write(chunk)`][stream-write] call returns `false`, then the
-`'drain'` event will indicate when it is appropriate to begin writing more data
-to the stream.
-
-```js
-// Write the data to the supplied writable stream one million times.
-// Be attentive to back-pressure.
-function writeOneMillionTimes(writer, data, encoding, callback) {
-  var i = 1000000;
-  write();
-  function write() {
-    var ok = true;
-    do {
-      i -= 1;
-      if (i === 0) {
-        // last time!
-        writer.write(data, encoding, callback);
-      } else {
-        // see if we should continue, or wait
-        // don't pass the callback, because we're not done yet.
-        ok = writer.write(data, encoding);
-      }
-    } while (i > 0 && ok);
-    if (i > 0) {
-      // had to stop early!
-      // write some more once it drains
-      writer.once('drain', write);
-    }
-  }
-}
-```
-
-#### Event: 'error'
-
-* {Error}
-
-Emitted if there was an error when writing or piping data.
-
-#### Event: 'finish'
-
-When the [`stream.end()`][stream-end] method has been called, and all data has
-been flushed to the underlying system, this event is emitted.
-
-```javascript
-var writer = getWritableStreamSomehow();
-for (var i = 0; i < 100; i++) {
-  writer.write(`hello, #${i}!\n`);
-}
-writer.end('this is the end\n');
-writer.on('finish', () => {
-  console.error('all writes are now complete.');
-});
-```
-
-#### Event: 'pipe'
-
-* `src` {stream.Readable} source stream that is piping to this writable
-
-This is emitted whenever the [`stream.pipe()`][] method is called on a readable
-stream, adding this writable to its set of destinations.
-
-```js
-var writer = getWritableStreamSomehow();
-var reader = getReadableStreamSomehow();
-writer.on('pipe', (src) => {
-  console.error('something is piping into the writer');
-  assert.equal(src, reader);
-});
-reader.pipe(writer);
-```
-
-#### Event: 'unpipe'
-
-* `src` {[Readable][] Stream} The source stream that
-  [unpiped][`stream.unpipe()`] this writable
-
-This is emitted whenever the [`stream.unpipe()`][] method is called on a
-readable stream, removing this writable from its set of destinations.
-
-```js
-var writer = getWritableStreamSomehow();
-var reader = getReadableStreamSomehow();
-writer.on('unpipe', (src) => {
-  console.error('something has stopped piping into the writer');
-  assert.equal(src, reader);
-});
-reader.pipe(writer);
-reader.unpipe(writer);
-```
-
-#### writable.cork()
-
-Forces buffering of all writes.
-
-Buffered data will be flushed either when [`stream.uncork()`][] or
-[`stream.end()`][stream-end] is called.
-
-#### writable.end([chunk][, encoding][, callback])
-
-* `chunk` {String|Buffer} Optional data to write
-* `encoding` {String} The encoding, if `chunk` is a String
-* `callback` {Function} Optional callback for when the stream is finished
-
-Call this method when no more data will be written to the stream. If supplied,
-the callback is attached as a listener on the [`'finish'`][] event.
-
-Calling [`stream.write()`][stream-write] after calling
-[`stream.end()`][stream-end] will raise an error.
-
-```js
-// write 'hello, ' and then end with 'world!'
-var file = fs.createWriteStream('example.txt');
-file.write('hello, ');
-file.end('world!');
-// writing more now is not allowed!
-```
-
-#### writable.setDefaultEncoding(encoding)
-
-* `encoding` {String} The new default encoding
-
-Sets the default encoding for a writable stream.
-
-#### writable.uncork()
-
-Flushes all data that has been buffered since the [`stream.cork()`][] call.
-
-#### writable.write(chunk[, encoding][, callback])
-
-* `chunk` {String|Buffer} The data to write
-* `encoding` {String} The encoding, if `chunk` is a String
-* `callback` {Function} Callback for when this chunk of data is flushed
-* Returns: {Boolean} `true` if the data was handled completely.
-
-This method writes some data to the underlying system, and calls the
-supplied callback once the data has been fully handled.
-
-The return value indicates if you should continue writing right now.
-If the data had to be buffered internally, then it will return
-`false`. Otherwise, it will return `true`.
-
-This return value is strictly advisory. You MAY continue to write,
-even if it returns `false`. However, writes will be buffered in
-memory, so it is best not to do this excessively. Instead, wait for
-the [`'drain'`][] event before writing more data.
-
-
-## API for Stream Implementors
-
-To implement any sort of stream, the pattern is the same:
-
-1. Extend the appropriate parent class in your own subclass. (The
-   [`util.inherits()`][] method is particularly helpful for this.)
-2. Call the appropriate parent class constructor in your constructor,
-   to be sure that the internal mechanisms are set up properly.
-3. Implement one or more specific methods, as detailed below.
-
-The class to extend and the method(s) to implement depend on the sort
-of stream class you are writing:
-
-| Use-case | Class | Method(s) to implement |
-| -------- | ----- | ---------------------- |
-| Reading only | [Readable](#stream_class_stream_readable_1) | [_read][stream-_read] |
-| Writing only | [Writable](#stream_class_stream_writable_1) | [_write][stream-_write], [_writev][stream-_writev] |
-| Reading and writing | [Duplex](#stream_class_stream_duplex_1) | [_read][stream-_read], [_write][stream-_write], [_writev][stream-_writev] |
-| Operate on written data, then read the result | [Transform](#stream_class_stream_transform_1) | [_transform][stream-_transform], [_flush][stream-_flush] |
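-
-As a rough sketch of that three-step pattern (the `NullWriter` name is
-illustrative, not part of any API), a minimal Writable subclass that
-simply discards its input might look like:
-
-```js
-const stream = require('stream');
-const util = require('util');
-
-// 1. Extend the appropriate parent class.
-util.inherits(NullWriter, stream.Writable);
-
-function NullWriter(options) {
-  // 2. Call the parent class constructor.
-  stream.Writable.call(this, options);
-}
-
-// 3. Implement the specific method(s) -- here, _write().
-NullWriter.prototype._write = function(chunk, encoding, callback) {
-  // drop the chunk and report success
-  callback();
-};
-```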
- -In your implementation code, it is very important to never call the methods -described in [API for Stream Consumers][]. Otherwise, you can potentially cause -adverse side effects in programs that consume your streaming interfaces. - -### Class: stream.Duplex - - - -A "duplex" stream is one that is both Readable and Writable, such as a TCP -socket connection. - -Note that `stream.Duplex` is an abstract class designed to be extended -with an underlying implementation of the [`stream._read(size)`][stream-_read] -and [`stream._write(chunk, encoding, callback)`][stream-_write] methods as you -would with a Readable or Writable stream class. - -Since JavaScript doesn't have multiple prototypal inheritance, this class -prototypally inherits from Readable, and then parasitically from Writable. It is -thus up to the user to implement both the low-level -[`stream._read(n)`][stream-_read] method as well as the low-level -[`stream._write(chunk, encoding, callback)`][stream-_write] method on extension -duplex classes. - -#### new stream.Duplex(options) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `allowHalfOpen` {Boolean} Default = `true`. If set to `false`, then - the stream will automatically end the readable side when the - writable side ends and vice versa. - * `readableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for readable side of the stream. Has no effect if `objectMode` - is `true`. - * `writableObjectMode` {Boolean} Default = `false`. Sets `objectMode` - for writable side of the stream. Has no effect if `objectMode` - is `true`. - -In classes that extend the Duplex class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -### Class: stream.PassThrough - -This is a trivial implementation of a [Transform][] stream that simply -passes the input bytes across to the output. Its purpose is mainly -for examples and testing, but there are occasionally use cases where -it can come in handy as a building block for novel sorts of streams. - -### Class: stream.Readable - - - -`stream.Readable` is an abstract class designed to be extended with an -underlying implementation of the [`stream._read(size)`][stream-_read] method. - -Please see [API for Stream Consumers][] for how to consume -streams in your programs. What follows is an explanation of how to -implement Readable streams in your programs. - -#### new stream.Readable([options]) - -* `options` {Object} - * `highWaterMark` {Number} The maximum number of bytes to store in - the internal buffer before ceasing to read from the underlying - resource. Default = `16384` (16kb), or `16` for `objectMode` streams - * `encoding` {String} If specified, then buffers will be decoded to - strings using the specified encoding. Default = `null` - * `objectMode` {Boolean} Whether this stream should behave - as a stream of objects. Meaning that [`stream.read(n)`][stream-read] returns - a single value instead of a Buffer of size n. Default = `false` - * `read` {Function} Implementation for the [`stream._read()`][stream-_read] - method. - -In classes that extend the Readable class, make sure to call the -Readable constructor so that the buffering settings can be properly -initialized. 
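-
-For example (a minimal sketch; `MySource` is an illustrative name), the
-options object is simply forwarded to the Readable constructor:
-
-```js
-const Readable = require('stream').Readable;
-const util = require('util');
-
-util.inherits(MySource, Readable);
-
-function MySource(options) {
-  // forwarding options ensures highWaterMark, encoding, objectMode,
-  // etc. are applied to this stream's internal buffering
-  Readable.call(this, options);
-}
-
-// a working subclass must also implement MySource.prototype._read()
-```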
-
-#### readable.\_read(size)
-
-* `size` {Number} Number of bytes to read asynchronously
-
-Note: **Implement this method, but do NOT call it directly.**
-
-This method is prefixed with an underscore because it is internal to the
-class that defines it and should only be called by the internal Readable
-class methods. All Readable stream implementations must provide a \_read
-method to fetch data from the underlying resource.
-
-When `_read()` is called, if data is available from the resource, the `_read()`
-implementation should start pushing that data into the read queue by calling
-[`this.push(dataChunk)`][stream-push]. `_read()` should continue reading from
-the resource and pushing data until push returns `false`, at which point it
-should stop reading from the resource. Only when `_read()` is called again after
-it has stopped should it start reading more data from the resource and pushing
-that data onto the queue.
-
-Note: once the `_read()` method is called, it will not be called again until
-the [`stream.push()`][stream-push] method is called.
-
-The `size` argument is advisory. Implementations where a "read" is a
-single call that returns data can use this to know how much data to
-fetch. Implementations where that is not relevant, such as TCP or
-TLS, may ignore this argument, and simply provide data whenever it
-becomes available. There is no need, for example, to "wait" until
-`size` bytes are available before calling [`stream.push(chunk)`][stream-push].
-
-#### readable.push(chunk[, encoding])
-
-* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
-* `encoding` {String} Encoding of String chunks. Must be a valid
-  Buffer encoding, such as `'utf8'` or `'ascii'`
-* return {Boolean} Whether or not more pushes should be performed
-
-Note: **This method should be called by Readable implementors, NOT
-by consumers of Readable streams.**
-
-If a value other than `null` is passed, the `push()` method adds a chunk of
-data into the queue for subsequent stream processors to consume. If `null` is
-passed, it signals the end of the stream (EOF), after which no more data
-can be written.
-
-The data added with `push()` can be pulled out by calling the
-[`stream.read()`][stream-read] method when the [`'readable'`][] event fires.
-
-This API is designed to be as flexible as possible. For example,
-you may be wrapping a lower-level source which has some sort of
-pause/resume mechanism, and a data callback. In those cases, you
-could wrap the low-level source object by doing something like this:
-
-```js
-// source is an object with readStop() and readStart() methods,
-// and an `ondata` member that gets called when it has data, and
-// an `onend` member that gets called when the data is over.
-
-util.inherits(SourceWrapper, Readable);
-
-function SourceWrapper(options) {
-  Readable.call(this, options);
-
-  this._source = getLowlevelSourceObject();
-
-  // Every time there's data, we push it into the internal buffer.
-  this._source.ondata = (chunk) => {
-    // if push() returns false, then we need to stop reading from source
-    if (!this.push(chunk))
-      this._source.readStop();
-  };
-
-  // When the source ends, we push the EOF-signaling `null` chunk
-  this._source.onend = () => {
-    this.push(null);
-  };
-}
-
-// _read will be called when the stream wants to pull more data in;
-// the advisory size argument is ignored in this case.
-SourceWrapper.prototype._read = function(size) { - this._source.readStart(); -}; -``` - -#### Example: A Counting Stream - - - -This is a basic example of a Readable stream. It emits the numerals -from 1 to 1,000,000 in ascending order, and then ends. - -```js -const Readable = require('stream').Readable; -const util = require('util'); -util.inherits(Counter, Readable); - -function Counter(opt) { - Readable.call(this, opt); - this._max = 1000000; - this._index = 1; -} - -Counter.prototype._read = function() { - var i = this._index++; - if (i > this._max) - this.push(null); - else { - var str = '' + i; - var buf = new Buffer(str, 'ascii'); - this.push(buf); - } -}; -``` - -#### Example: SimpleProtocol v1 (Sub-optimal) - -This is similar to the `parseHeader` function described -[here](#stream_readable_unshift_chunk), but implemented as a custom stream. -Also, note that this implementation does not convert the incoming data to a -string. - -However, this would be better implemented as a [Transform][] stream. See -[SimpleProtocol v2][] for a better implementation. - -```js -// A parser for a simple data protocol. -// The "header" is a JSON object, followed by 2 \n characters, and -// then a message body. -// -// NOTE: This can be done more simply as a Transform stream! -// Using Readable directly for this is sub-optimal. See the -// alternative example below under the Transform section. - -const Readable = require('stream').Readable; -const util = require('util'); - -util.inherits(SimpleProtocol, Readable); - -function SimpleProtocol(source, options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(source, options); - - Readable.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - - // source is a readable stream, such as a socket or file - this._source = source; - - var self = this; - source.on('end', () => { - self.push(null); - }); - - // give it a kick whenever the source is readable - // read(0) will not consume any bytes - source.on('readable', () => { - self.read(0); - }); - - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._read = function(n) { - if (!this._inBody) { - var chunk = this._source.read(); - - // if the source doesn't have data, we don't have data yet. - if (chunk === null) - return this.push(''); - - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. - this._rawHeader.push(chunk); - this.push(''); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // now, because we got some extra data, unshift the rest - // back into the read queue so that our consumer will see it. - var b = chunk.slice(split); - this.unshift(b); - // calling unshift by itself does not reset the reading state - // of the stream; since we're inside _read, doing an additional - // push('') will reset the state appropriately. - this.push(''); - - // and let them know that we are done parsing the header. 
- this.emit('header', this.header); - } - } else { - // from there on, just provide the data to our consumer. - // careful not to push(null), since that would indicate EOF. - var chunk = this._source.read(); - if (chunk) this.push(chunk); - } -}; - -// Usage: -// var parser = new SimpleProtocol(source); -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Transform - -A "transform" stream is a duplex stream where the output is causally -connected in some way to the input, such as a [zlib][] stream or a -[crypto][] stream. - -There is no requirement that the output be the same size as the input, -the same number of chunks, or arrive at the same time. For example, a -Hash stream will only ever have a single chunk of output which is -provided when the input is ended. A zlib stream will produce output -that is either much smaller or much larger than its input. - -Rather than implement the [`stream._read()`][stream-_read] and -[`stream._write()`][stream-_write] methods, Transform classes must implement the -[`stream._transform()`][stream-_transform] method, and may optionally -also implement the [`stream._flush()`][stream-_flush] method. (See below.) - -#### new stream.Transform([options]) - -* `options` {Object} Passed to both Writable and Readable - constructors. Also has the following fields: - * `transform` {Function} Implementation for the - [`stream._transform()`][stream-_transform] method. - * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush] - method. - -In classes that extend the Transform class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### Events: 'finish' and 'end' - -The [`'finish'`][] and [`'end'`][] events are from the parent Writable -and Readable classes respectively. The `'finish'` event is fired after -[`stream.end()`][stream-end] is called and all chunks have been processed by -[`stream._transform()`][stream-_transform], `'end'` is fired after all data has -been output which is after the callback in [`stream._flush()`][stream-_flush] -has been called. - -#### transform.\_flush(callback) - -* `callback` {Function} Call this function (optionally with an error - argument) when you are done flushing any remaining data. - -Note: **This function MUST NOT be called directly.** It MAY be implemented -by child classes, and if so, will be called by the internal Transform -class methods only. - -In some cases, your transform operation may need to emit a bit more -data at the end of the stream. For example, a `Zlib` compression -stream will store up some internal state so that it can optimally -compress the output. At the end, however, it needs to do the best it -can with what is left, so that the data will be complete. - -In those cases, you can implement a `_flush()` method, which will be -called at the very end, after all the written data is consumed, but -before emitting [`'end'`][] to signal the end of the readable side. Just -like with [`stream._transform()`][stream-_transform], call -`transform.push(chunk)` zero or more times, as appropriate, and call `callback` -when the flush operation is complete. - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. 
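-
-A minimal sketch (the `CollectStream` name and its `_accumulated` field
-are illustrative, assumed to be set up by a matching `_transform()`):
-
-```js
-CollectStream.prototype._flush = function(callback) {
-  // push whatever _transform() accumulated as one final chunk,
-  // before 'end' is emitted on the readable side
-  this.push(this._accumulated);
-  // signal that flushing is complete
-  callback();
-};
-```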
- -#### transform.\_transform(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be transformed. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument and data) when you are done processing the supplied chunk. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Transform -class methods only. - -All Transform stream implementations must provide a `_transform()` -method to accept input and produce output. - -`_transform()` should do whatever has to be done in this specific -Transform class, to handle the bytes being written, and pass them off -to the readable portion of the interface. Do asynchronous I/O, -process things, and so on. - -Call `transform.push(outputChunk)` 0 or more times to generate output -from this input chunk, depending on how much data you want to output -as a result of this chunk. - -Call the callback function only when the current chunk is completely -consumed. Note that there may or may not be output as a result of any -particular input chunk. If you supply a second argument to the callback -it will be passed to the push method. In other words the following are -equivalent: - -```js -transform.prototype._transform = function (data, encoding, callback) { - this.push(data); - callback(); -}; - -transform.prototype._transform = function (data, encoding, callback) { - callback(null, data); -}; -``` - -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### Example: `SimpleProtocol` parser v2 - -The example [here](#stream_example_simpleprotocol_v1_sub_optimal) of a simple -protocol parser can be implemented simply by using the higher level -[Transform][] stream class, similar to the `parseHeader` and `SimpleProtocol -v1` examples. - -In this example, rather than providing the input as an argument, it -would be piped into the parser, which is a more idiomatic Node.js stream -approach. - -```javascript -const util = require('util'); -const Transform = require('stream').Transform; -util.inherits(SimpleProtocol, Transform); - -function SimpleProtocol(options) { - if (!(this instanceof SimpleProtocol)) - return new SimpleProtocol(options); - - Transform.call(this, options); - this._inBody = false; - this._sawFirstCr = false; - this._rawHeader = []; - this.header = null; -} - -SimpleProtocol.prototype._transform = function(chunk, encoding, done) { - if (!this._inBody) { - // check if the chunk has a \n\n - var split = -1; - for (var i = 0; i < chunk.length; i++) { - if (chunk[i] === 10) { // '\n' - if (this._sawFirstCr) { - split = i; - break; - } else { - this._sawFirstCr = true; - } - } else { - this._sawFirstCr = false; - } - } - - if (split === -1) { - // still waiting for the \n\n - // stash the chunk, and try again. 
- this._rawHeader.push(chunk); - } else { - this._inBody = true; - var h = chunk.slice(0, split); - this._rawHeader.push(h); - var header = Buffer.concat(this._rawHeader).toString(); - try { - this.header = JSON.parse(header); - } catch (er) { - this.emit('error', new Error('invalid simple protocol data')); - return; - } - // and let them know that we are done parsing the header. - this.emit('header', this.header); - - // now, because we got some extra data, emit this first. - this.push(chunk.slice(split)); - } - } else { - // from there on, just provide the data to our consumer as-is. - this.push(chunk); - } - done(); -}; - -// Usage: -// var parser = new SimpleProtocol(); -// source.pipe(parser) -// Now parser is a readable stream that will emit 'header' -// with the parsed header data. -``` - -### Class: stream.Writable - - - -`stream.Writable` is an abstract class designed to be extended with an -underlying implementation of the -[`stream._write(chunk, encoding, callback)`][stream-_write] method. - -Please see [API for Stream Consumers][] for how to consume -writable streams in your programs. What follows is an explanation of -how to implement Writable streams in your programs. - -#### new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Default = `16384` - (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Default = `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. If set you can - write arbitrary data instead of only `Buffer` / `String` data. - Default = `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. - * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -In classes that extend the Writable class, make sure to call the -constructor so that the buffering settings can be properly -initialized. - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then this is the - encoding type. If chunk is a buffer, then this is the special - value - 'buffer', ignore it in this case. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunk. - -All Writable stream implementations must provide a -[`stream._write()`][stream-_write] method to send data to the underlying -resource. - -Note: **This function MUST NOT be called directly.** It should be -implemented by child classes, and called by the internal Writable -class methods only. - -Call the callback using the standard `callback(error)` pattern to -signal that the write completed successfully or with an error. - -If the `decodeStrings` flag is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the sort of string that it is. This is to support -implementations that have an optimized handling for certain string -data encodings. If you do not explicitly set the `decodeStrings` -option to `false`, then you can safely ignore the `encoding` argument, -and assume that `chunk` will always be a Buffer. 
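-
-As a minimal sketch, a `_write()` that appends each chunk to an already
-opened file descriptor could look like this (`FileSink` and its `_fd`
-field are illustrative assumptions, not part of the stream API):
-
-```js
-FileSink.prototype._write = function(chunk, encoding, callback) {
-  // with the default decodeStrings === true, chunk is always a Buffer
-  fs.write(this._fd, chunk, 0, chunk.length, null, (er) => {
-    // report success or failure of this chunk to the Writable machinery
-    callback(er);
-  });
-};
-```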
- -This method is prefixed with an underscore because it is internal to -the class that defines it, and should not be called directly by user -programs. However, you **are** expected to override this method in -your own extension classes. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} Call this function (optionally with an error - argument) when you are done processing the supplied chunks. - -Note: **This function MUST NOT be called directly.** It may be -implemented by child classes, and called by the internal Writable -class methods only. - -This function is completely optional to implement. In most cases it is -unnecessary. If implemented, it will be called with all the chunks -that are buffered in the write queue. - - -## Simplified Constructor API - - - -In simple cases there is now the added benefit of being able to construct a -stream without inheritance. - -This can be done by passing the appropriate methods as constructor options: - -Examples: - -### Duplex - -```js -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var duplex = new stream.Duplex({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - }, - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -### Readable - -```js -var readable = new stream.Readable({ - read: function(n) { - // sets this._read under the hood - - // push data onto the read queue, passing null - // will signal the end of the stream (EOF) - this.push(chunk); - } -}); -``` - -### Transform - -```js -var transform = new stream.Transform({ - transform: function(chunk, encoding, next) { - // sets this._transform under the hood - - // generate output as many times as needed - // this.push(chunk); - - // call when the current chunk is consumed - next(); - }, - flush: function(done) { - // sets this._flush under the hood - - // generate output as many times as needed - // this.push(chunk); - - done(); - } -}); -``` - -### Writable - -```js -var writable = new stream.Writable({ - write: function(chunk, encoding, next) { - // sets this._write under the hood - - // An optional error can be passed as the first argument - next() - } -}); - -// or - -var writable = new stream.Writable({ - writev: function(chunks, next) { - // sets this._writev under the hood - - // An optional error can be passed as the first argument - next() - } -}); -``` - -## Streams: Under the Hood - - - -### Buffering - - - -Both Writable and Readable streams will buffer data on an internal -object which can be retrieved from `_writableState.getBuffer()` or -`_readableState.buffer`, respectively. - -The amount of data that will potentially be buffered depends on the -`highWaterMark` option which is passed into the constructor. - -Buffering in Readable streams happens when the implementation calls -[`stream.push(chunk)`][stream-push]. 
If the consumer of the Stream does not -call [`stream.read()`][stream-read], then the data will sit in the internal -queue until it is consumed. - -Buffering in Writable streams happens when the user calls -[`stream.write(chunk)`][stream-write] repeatedly, even when it returns `false`. - -The purpose of streams, especially with the [`stream.pipe()`][] method, is to -limit the buffering of data to acceptable levels, so that sources and -destinations of varying speed will not overwhelm the available memory. - -### Compatibility with Older Node.js Versions - - - -In versions of Node.js prior to v0.10, the Readable stream interface was -simpler, but also less powerful and less useful. - -* Rather than waiting for you to call the [`stream.read()`][stream-read] method, - [`'data'`][] events would start emitting immediately. If you needed to do - some I/O to decide how to handle data, then you had to store the chunks - in some kind of buffer so that they would not be lost. -* The [`stream.pause()`][stream-pause] method was advisory, rather than - guaranteed. This meant that you still had to be prepared to receive - [`'data'`][] events even when the stream was in a paused state. - -In Node.js v0.10, the [Readable][] class was added. -For backwards compatibility with older Node.js programs, Readable streams -switch into "flowing mode" when a [`'data'`][] event handler is added, or -when the [`stream.resume()`][stream-resume] method is called. The effect is -that, even if you are not using the new [`stream.read()`][stream-read] method -and [`'readable'`][] event, you no longer have to worry about losing -[`'data'`][] chunks. - -Most programs will continue to function normally. However, this -introduces an edge case in the following conditions: - -* No [`'data'`][] event handler is added. -* The [`stream.resume()`][stream-resume] method is never called. -* The stream is not piped to any writable destination. - -For example, consider the following code: - -```js -// WARNING! BROKEN! -net.createServer((socket) => { - - // we add an 'end' method, but never consume the data - socket.on('end', () => { - // It will never get here. - socket.end('I got your message (but didnt read it)\n'); - }); - -}).listen(1337); -``` - -In versions of Node.js prior to v0.10, the incoming message data would be -simply discarded. However, in Node.js v0.10 and beyond, -the socket will remain paused forever. - -The workaround in this situation is to call the -[`stream.resume()`][stream-resume] method to start the flow of data: - -```js -// Workaround -net.createServer((socket) => { - - socket.on('end', () => { - socket.end('I got your message (but didnt read it)\n'); - }); - - // start the flow of data, discarding it. - socket.resume(); - -}).listen(1337); -``` - -In addition to new Readable streams switching into flowing mode, -pre-v0.10 style streams can be wrapped in a Readable class using the -[`stream.wrap()`][] method. - - -### Object Mode - - - -Normally, Streams operate on Strings and Buffers exclusively. - -Streams that are in **object mode** can emit generic JavaScript values -other than Buffers and Strings. - -A Readable stream in object mode will always return a single item from -a call to [`stream.read(size)`][stream-read], regardless of what the size -argument is. - -A Writable stream in object mode will always ignore the `encoding` -argument to [`stream.write(data, encoding)`][stream-write]. - -The special value `null` still retains its special value for object -mode streams. 
-The special value `null` still retains its special meaning for object
-mode streams. That is, for object mode readable streams, `null` as a
-return value from [`stream.read()`][stream-read] indicates that there is no more
-data, and [`stream.push(null)`][stream-push] will signal the end of stream data
-(`EOF`).
-
-No streams in Node.js core are object mode streams. This pattern is only
-used by userland streaming libraries.
-
-Set `objectMode` on the options object passed to your stream subclass's
-constructor. Setting `objectMode` mid-stream is not safe.
-
-For Duplex streams, `objectMode` can be set exclusively for the readable or
-the writable side with `readableObjectMode` and `writableObjectMode`,
-respectively. These options can be used to implement parsers and
-serializers with Transform streams.
-
-```js
-const util = require('util');
-const StringDecoder = require('string_decoder').StringDecoder;
-const Transform = require('stream').Transform;
-util.inherits(JSONParseStream, Transform);
-
-// Gets \n-delimited JSON string data, and emits the parsed objects
-function JSONParseStream() {
-  if (!(this instanceof JSONParseStream))
-    return new JSONParseStream();
-
-  Transform.call(this, { readableObjectMode: true });
-
-  this._buffer = '';
-  this._decoder = new StringDecoder('utf8');
-}
-
-JSONParseStream.prototype._transform = function(chunk, encoding, cb) {
-  this._buffer += this._decoder.write(chunk);
-  // split on newlines
-  var lines = this._buffer.split(/\r?\n/);
-  // keep the last partial line buffered
-  this._buffer = lines.pop();
-  for (var l = 0; l < lines.length; l++) {
-    var line = lines[l];
-    try {
-      var obj = JSON.parse(line);
-    } catch (er) {
-      this.emit('error', er);
-      return;
-    }
-    // push the parsed object out to the readable consumer
-    this.push(obj);
-  }
-  cb();
-};
-
-JSONParseStream.prototype._flush = function(cb) {
-  // flush any bytes still sitting in the decoder, then
-  // handle any leftover partial line
-  this._buffer += this._decoder.end();
-  var rem = this._buffer.trim();
-  if (rem) {
-    try {
-      var obj = JSON.parse(rem);
-    } catch (er) {
-      this.emit('error', er);
-      return;
-    }
-    // push the parsed object out to the readable consumer
-    this.push(obj);
-  }
-  cb();
-};
-```
-
-### `stream.read(0)`
-
-There are some cases where you want to trigger a refresh of the
-underlying readable stream mechanisms, without actually consuming any
-data. In that case, you can call `stream.read(0)`, which will always
-return `null`.
-
-If the internal read buffer is below the `highWaterMark`, and the
-stream is not currently reading, then calling `stream.read(0)` will trigger
-a low-level [`stream._read()`][stream-_read] call.
-
-There is almost never a need to do this. However, you will see some
-cases in Node.js's internals where this is done, particularly in the
-Readable stream class internals.
-
-### `stream.push('')`
-
-Pushing a zero-byte string or Buffer (when not in [Object mode][]) has an
-interesting side effect. Because it *is* a call to
-[`stream.push()`][stream-push], it will end the `reading` process. However, it
-does *not* add any data to the readable buffer, so there's nothing for
-a user to consume.
-
-Very rarely, there are cases where you have no data to provide now,
-but the consumer of your stream (or, perhaps, another bit of your own
-code) will know when to check again, by calling [`stream.read(0)`][stream-read].
-In those cases, you *may* call `stream.push('')`.
-
-So far, the only use case for this functionality is in the
-[`tls.CryptoStream`][] class, which is deprecated in Node.js/io.js v1.0.
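-A minimal sketch of the shape such code takes (the `_ready` flag and the
-payload here are hypothetical):
-
-```js
-var stream = require('stream');
-
-var source = new stream.Readable({
-  read: function(n) {
-    if (!this._ready) {
-      // nothing to hand out yet: end this `reading` pass without
-      // queueing any data; other code calls read(0) again later
-      this.push('');
-      return;
-    }
-    this.push('actual data');
-    this.push(null);
-  }
-});
-```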
-If you find that you have to use `stream.push('')`, please consider
-another approach, because it almost certainly indicates that something is
-horribly wrong.
-
-[`'data'`]: #stream_event_data
-[`'drain'`]: #stream_event_drain
-[`'end'`]: #stream_event_end
-[`'finish'`]: #stream_event_finish
-[`'readable'`]: #stream_event_readable
-[`buf.toString(encoding)`]: https://nodejs.org/docs/v5.8.0/api/buffer.html#buffer_buf_tostring_encoding_start_end
-[`EventEmitter`]: https://nodejs.org/docs/v5.8.0/api/events.html#events_class_eventemitter
-[`process.stderr`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stderr
-[`process.stdin`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdin
-[`process.stdout`]: https://nodejs.org/docs/v5.8.0/api/process.html#process_process_stdout
-[`stream.cork()`]: #stream_writable_cork
-[`stream.pipe()`]: #stream_readable_pipe_destination_options
-[`stream.uncork()`]: #stream_writable_uncork
-[`stream.unpipe()`]: #stream_readable_unpipe_destination
-[`stream.wrap()`]: #stream_readable_wrap_stream
-[`tls.CryptoStream`]: https://nodejs.org/docs/v5.8.0/api/tls.html#tls_class_cryptostream
-[`util.inherits()`]: https://nodejs.org/docs/v5.8.0/api/util.html#util_util_inherits_constructor_superconstructor
-[API for Stream Consumers]: #stream_api_for_stream_consumers
-[API for Stream Implementors]: #stream_api_for_stream_implementors
-[child process stdin]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdin
-[child process stdout and stderr]: https://nodejs.org/docs/v5.8.0/api/child_process.html#child_process_child_stdout
-[Compatibility]: #stream_compatibility_with_older_node_js_versions
-[crypto]: https://nodejs.org/docs/v5.8.0/api/crypto.html
-[Duplex]: #stream_class_stream_duplex
-[fs read streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_readstream
-[fs write streams]: https://nodejs.org/docs/v5.8.0/api/fs.html#fs_class_fs_writestream
-[HTTP requests, on the client]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_clientrequest
-[HTTP responses, on the server]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_serverresponse
-[http-incoming-message]: https://nodejs.org/docs/v5.8.0/api/http.html#http_class_http_incomingmessage
-[Object mode]: #stream_object_mode
-[Readable]: #stream_class_stream_readable
-[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2
-[stream-_flush]: #stream_transform_flush_callback
-[stream-_read]: #stream_readable_read_size_1
-[stream-_transform]: #stream_transform_transform_chunk_encoding_callback
-[stream-_write]: #stream_writable_write_chunk_encoding_callback_1
-[stream-_writev]: #stream_writable_writev_chunks_callback
-[stream-end]: #stream_writable_end_chunk_encoding_callback
-[stream-pause]: #stream_readable_pause
-[stream-push]: #stream_readable_push_chunk_encoding
-[stream-read]: #stream_readable_read_size
-[stream-resume]: #stream_readable_resume
-[stream-write]: #stream_writable_write_chunk_encoding_callback
-[TCP sockets]: https://nodejs.org/docs/v5.8.0/api/net.html#net_class_net_socket
-[Transform]: #stream_class_stream_transform
-[Writable]: #stream_class_stream_writable
-[zlib]: https://nodejs.org/docs/v5.8.0/api/zlib.html
diff --git a/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
deleted file mode 100644
index 83275f192..000000000
--- a/node_modules/bl/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# streams WG Meeting 2015-01-30
-
-## Links
-
-* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
-* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
-* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/
-
-## Agenda
-
-Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.
-
-* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
-* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
-* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
-* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)
-
-## Minutes
-
-### adopt a charter
-
-* group: +1's all around
-
-### What versioning scheme should be adopted?
-* group: +1’s 3.0.0
-* domenic+group: pulling in patches from other sources where appropriate
-* mikeal: version independently, suggesting versions for io.js
-* mikeal+domenic: work with TC to notify in advance of changes
-
-### streamline creation of streams
-* sam: streamline creation of streams
-* domenic: nice simple solution posted
-  but, we lose the opportunity to change the model
-  may not be backwards compatible (double check keys)
-
-  **action item:** domenic will check
-
-### remove implicit flowing of streams on(‘data’)
-* add isFlowing / isPaused
-* mikeal: worrying that we’re documenting polyfill methods – confuses users
-* domenic: more reflective API is probably good, with warning labels for users
-* new section for mad scientists (reflective stream access)
-* calvin: name the “third state”
-* mikeal: maybe borrow the name from whatwg?
-* domenic: we’re missing the “third state”
-* consensus: kind of difficult to name the third state
-* mikeal: figure out differences in states / compat
-* mathias: always flow on data – eliminates third state
-  * explore what it breaks
-
-**action items:**
-* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
-* ask rod/build for infrastructure
-* **chris**: explore the “flow on data” approach
-* add isPaused/isFlowing
-* add new docs section
-* move isPaused to that section
-
-
diff --git a/node_modules/bl/node_modules/readable-stream/duplex.js b/node_modules/bl/node_modules/readable-stream/duplex.js
deleted file mode 100644
index ca807af87..000000000
--- a/node_modules/bl/node_modules/readable-stream/duplex.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require("./lib/_stream_duplex.js")
diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
deleted file mode 100644
index 736693b84..000000000
--- a/node_modules/bl/node_modules/readable-stream/lib/_stream_duplex.js
+++ /dev/null
@@ -1,75 +0,0 @@
-// a duplex stream is just a stream that is both readable and writable.
-// Since JS doesn't have multiple prototypal inheritance, this class
-// prototypally inherits from Readable, and then parasitically from
-// Writable.
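-//
-// (In sketch form: every method on Writable.prototype that Duplex does
-// not already define gets copied across, roughly
-//
-//   for (var m in Writable.prototype)
-//     if (!Duplex.prototype[m]) Duplex.prototype[m] = Writable.prototype[m];
-//
-// which is what the objectKeys loop below does.)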
- -'use strict'; - -/**/ - -var objectKeys = Object.keys || function (obj) { - var keys = []; - for (var key in obj) { - keys.push(key); - }return keys; -}; -/**/ - -module.exports = Duplex; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -var Readable = require('./_stream_readable'); -var Writable = require('./_stream_writable'); - -util.inherits(Duplex, Readable); - -var keys = objectKeys(Writable.prototype); -for (var v = 0; v < keys.length; v++) { - var method = keys[v]; - if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; -} - -function Duplex(options) { - if (!(this instanceof Duplex)) return new Duplex(options); - - Readable.call(this, options); - Writable.call(this, options); - - if (options && options.readable === false) this.readable = false; - - if (options && options.writable === false) this.writable = false; - - this.allowHalfOpen = true; - if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; - - this.once('end', onend); -} - -// the no-half-open enforcer -function onend() { - // if we allow half-open state, or if the writable side ended, - // then we're ok. - if (this.allowHalfOpen || this._writableState.ended) return; - - // no more data can be written. - // But allow more writes to happen in this tick. - processNextTick(onEndNT, this); -} - -function onEndNT(self) { - self.end(); -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js deleted file mode 100644 index d06f71f18..000000000 --- a/node_modules/bl/node_modules/readable-stream/lib/_stream_passthrough.js +++ /dev/null @@ -1,26 +0,0 @@ -// a passthrough stream. -// basically just the most minimal sort of Transform stream. -// Every written chunk gets output as-is. 
- -'use strict'; - -module.exports = PassThrough; - -var Transform = require('./_stream_transform'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(PassThrough, Transform); - -function PassThrough(options) { - if (!(this instanceof PassThrough)) return new PassThrough(options); - - Transform.call(this, options); -} - -PassThrough.prototype._transform = function (chunk, encoding, cb) { - cb(null, chunk); -}; \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 54a9d5c55..000000000 --- a/node_modules/bl/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,880 +0,0 @@ -'use strict'; - -module.exports = Readable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var isArray = require('isarray'); -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Readable.ReadableState = ReadableState; - -var EE = require('events'); - -/**/ -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var debugUtil = require('util'); -var debug = undefined; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/**/ - -var StringDecoder; - -util.inherits(Readable, Stream); - -var Duplex; -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.buffer = []; - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. 
- this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // when piping, we only care about 'readable' events that happen - // after read()ing all the bytes and not getting any pushback. - this.ranOut = false; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -var Duplex; -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options && typeof options.read === 'function') this._read = options.read; - - Stream.call(this); -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - - if (!state.objectMode && typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = new Buffer(chunk, encoding); - encoding = ''; - } - } - - return readableAddChunk(this, state, chunk, encoding, false); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - var state = this._readableState; - return readableAddChunk(this, state, chunk, '', true); -}; - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -function readableAddChunk(stream, state, chunk, encoding, addToFront) { - var er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (state.ended && !addToFront) { - var e = new Error('stream.push() after EOF'); - stream.emit('error', e); - } else if (state.endEmitted && addToFront) { - var e = new Error('stream.unshift() after end event'); - stream.emit('error', e); - } else { - var skipAdd; - if (state.decoder && !addToFront && !encoding) { - chunk = state.decoder.write(chunk); - skipAdd = !state.objectMode && chunk.length === 0; - } - - if (!addToFront) state.reading = false; - - // Don't add to the buffer if we've decoded to an empty string chunk and - // we're not in object mode - if (!skipAdd) { - // if we want the data now, just emit it. - if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - } - - maybeReadMore(stream, state); - } - } else if (!addToFront) { - state.reading = false; - } - - return needMoreData(state); -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. 
Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -function howMuchToRead(n, state) { - if (state.length === 0 && state.ended) return 0; - - if (state.objectMode) return n === 0 ? 0 : 1; - - if (n === null || isNaN(n)) { - // only flow one buffer at a time - if (state.flowing && state.buffer.length) return state.buffer[0].length;else return state.length; - } - - if (n <= 0) return 0; - - // If we're asking for more than the target buffer level, - // then raise the water mark. Bump up to the next highest - // power of 2, to prevent increasing it excessively in tiny - // amounts. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - - // don't have that much. return null, unless we've ended. - if (n > state.length) { - if (!state.ended) { - state.needReadable = true; - return 0; - } else { - return state.length; - } - } - - return n; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - var state = this._readableState; - var nOrig = n; - - if (typeof n !== 'number' || n > 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. 
- // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } - - if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - } - - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (doRead && !state.reading) n = howMuchToRead(nOrig, state); - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } - - state.length -= n; - - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (state.length === 0 && !state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended && state.length === 0) endReadable(this); - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function chunkInvalid(state, chunk) { - var er = null; - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. -function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. 
-function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - processNextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : cleanup; - if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable) { - debug('onunpipe'); - if (readable === src) { - cleanup(); - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', cleanup); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. - if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - var ret = dest.write(chunk); - if (false === ret) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - if (state.pipesCount === 1 && state.pipes[0] === dest && src.listenerCount('data') === 1 && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. 
- function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - // This is a brutally ugly hack to make sure that our error handler - // is attached before any userland ones. NEVER DO THIS. - if (!dest._events || !dest._events.error) dest.on('error', onerror);else if (isArray(dest._events.error)) dest._events.error.unshift(onerror);else dest._events.error = [onerror, dest._events.error]; - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); - }return this; - } - - // try to find the right one. - var i = indexOf(state.pipes, dest); - if (i === -1) return this; - - state.pipes.splice(i, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - // If listening to data, and it has not explicitly been paused, - // then call resume to start the flow of data on the next tick. 
- if (ev === 'data' && false !== this._readableState.flowing) { - this.resume(); - } - - if (ev === 'readable' && !this._readableState.endEmitted) { - var state = this._readableState; - if (!state.readableListening) { - state.readableListening = true; - state.emittedReadable = false; - state.needReadable = true; - if (!state.reading) { - processNextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this, state); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - processNextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - if (state.flowing) { - do { - var chunk = stream.read(); - } while (null !== chunk && state.flowing); - } -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - - var self = this; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) self.push(chunk); - } - - self.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = self.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. - var events = ['error', 'close', 'destroy', 'pause', 'resume']; - forEach(events, function (ev) { - stream.on(ev, self.emit.bind(self, ev)); - }); - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - self._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return self; -}; - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. 
-// Length is the combined lengths of all the buffers in the list. -function fromList(n, state) { - var list = state.buffer; - var length = state.length; - var stringMode = !!state.decoder; - var objectMode = !!state.objectMode; - var ret; - - // nothing in the list, definitely empty. - if (list.length === 0) return null; - - if (length === 0) ret = null;else if (objectMode) ret = list.shift();else if (!n || n >= length) { - // read it all, truncate the array. - if (stringMode) ret = list.join('');else if (list.length === 1) ret = list[0];else ret = Buffer.concat(list, length); - list.length = 0; - } else { - // read just some of it. - if (n < list[0].length) { - // just take a part of the first list item. - // slice is the same for buffers and strings. - var buf = list[0]; - ret = buf.slice(0, n); - list[0] = buf.slice(n); - } else if (n === list[0].length) { - // first list is a perfect match - ret = list.shift(); - } else { - // complex case. - // we have enough to cover it, but it spans past the first buffer. - if (stringMode) ret = '';else ret = new Buffer(n); - - var c = 0; - for (var i = 0, l = list.length; i < l && c < n; i++) { - var buf = list[0]; - var cpy = Math.min(n - c, buf.length); - - if (stringMode) ret += buf.slice(0, cpy);else buf.copy(ret, c, 0, cpy); - - if (cpy < buf.length) list[0] = buf.slice(cpy);else list.shift(); - - c += cpy; - } - } - } - - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. - if (state.length > 0) throw new Error('endReadable called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - processNextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index 625cdc176..000000000 --- a/node_modules/bl/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. 
-// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. - -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -util.inherits(Transform, Duplex); - -function TransformState(stream) { - this.afterTransform = function (er, data) { - return afterTransform(stream, er, data); - }; - - this.needTransform = false; - this.transforming = false; - this.writecb = null; - this.writechunk = null; - this.writeencoding = null; -} - -function afterTransform(stream, er, data) { - var ts = stream._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); - - ts.writechunk = null; - ts.writecb = null; - - if (data !== null && data !== undefined) stream.push(data); - - cb(er); - - var rs = stream._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - stream._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = new TransformState(this); - - // when the writable side finishes, then flush out anything remaining. - var stream = this; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); - });else done(stream); - }); -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! 
-// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. - ts.needTransform = true; - } -}; - -function done(stream, er) { - if (er) return stream.emit('error', er); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - var ws = stream._writableState; - var ts = stream._transformState; - - if (ws.length) throw new Error('calling transform done when ws.length != 0'); - - if (ts.transforming) throw new Error('calling transform done when still transforming'); - - return stream.push(null); -} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index 95916c992..000000000 --- a/node_modules/bl/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,516 +0,0 @@ -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -'use strict'; - -module.exports = Writable; - -/**/ -var processNextTick = require('process-nextick-args'); -/**/ - -/**/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? 
setImmediate : processNextTick; -/**/ - -/**/ -var Buffer = require('buffer').Buffer; -/**/ - -Writable.WritableState = WritableState; - -/**/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/**/ - -/**/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/**/ - -/**/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/**/ - -var Buffer = require('buffer').Buffer; - -util.inherits(Writable, Stream); - -function nop() {} - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -var Duplex; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. - this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. 
- this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // create the two objects needed to store the corked requests - // they are not a linked list, as no new elements are inserted in there - this.corkedRequestsFree = new CorkedRequest(this); - this.corkedRequestsFree.next = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function writableStateGetBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') - }); - } catch (_) {} -})(); - -var Duplex; -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe. Not readable.')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - processNextTick(cb, er); -} - -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - var er = new TypeError('Invalid non-string/buffer chunk'); - stream.emit('error', er); - processNextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = new Buffer(chunk, encoding); - } - return chunk; -} - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) processNextTick(cb, er);else cb(er); - - stream._writableState.errorEmitted = true; - stream.emit('error', er); -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /**/ - asyncWrite(afterWrite, stream, state, finished, cb); - /**/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - while (entry) { - buffer[count] = entry; - entry = entry.next; - count += 1; - } - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. - // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. 
- if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequestCount = 0; - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function prefinish(stream, state) { - if (!state.prefinished) { - state.prefinished = true; - stream.emit('prefinish'); - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - if (state.pendingcb === 0) { - prefinish(stream, state); - state.finished = true; - stream.emit('finish'); - } else { - prefinish(stream, state); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) processNextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function (err) { - var entry = _this.entry; - _this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = _this; - } else { - state.corkedRequestsFree = _this; - } - }; -} \ No newline at end of file diff --git a/node_modules/bl/node_modules/readable-stream/package.json b/node_modules/bl/node_modules/readable-stream/package.json deleted file mode 100644 index d77b090ec..000000000 --- a/node_modules/bl/node_modules/readable-stream/package.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "name": "readable-stream", - "version": "2.0.6", - "description": "Streams3, a user-land copy of the stream library from Node.js", - "main": "readable.js", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~0.10.x", - "util-deprecate": "~1.0.1" - }, - "devDependencies": { - "tap": "~0.2.6", - "tape": "~4.5.1", - "zuul": "~3.9.0" - }, - "scripts": { - "test": "tap test/parallel/*.js test/ours/*.js", - "browser": "npm run write-zuul && zuul -- test/browser.js", - "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" - }, - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream" - }, - "keywords": [ - "readable", - "stream", - "pipe" - ], - "browser": { - "util": false - }, - "license": "MIT" -} diff --git a/node_modules/bl/node_modules/readable-stream/passthrough.js b/node_modules/bl/node_modules/readable-stream/passthrough.js deleted file mode 100644 
index 27e8d8a55..000000000 --- a/node_modules/bl/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/bl/node_modules/readable-stream/readable.js b/node_modules/bl/node_modules/readable-stream/readable.js deleted file mode 100644 index 6222a5798..000000000 --- a/node_modules/bl/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,12 +0,0 @@ -var Stream = (function (){ - try { - return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify - } catch(_){} -}()); -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = Stream || exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/bl/node_modules/readable-stream/transform.js b/node_modules/bl/node_modules/readable-stream/transform.js deleted file mode 100644 index 5d482f078..000000000 --- a/node_modules/bl/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/bl/node_modules/readable-stream/writable.js b/node_modules/bl/node_modules/readable-stream/writable.js deleted file mode 100644 index e1e9efdf3..000000000 --- a/node_modules/bl/node_modules/readable-stream/writable.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_writable.js") -- cgit v1.2.3
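For orientation, the _stream_writable.js removed above implements the Writable half of the Streams3 API that bl depends on: subclasses supply _write() (the default, per the deleted code, errors with 'not implemented'), write() returns false once the buffered length reaches highWaterMark, and cork()/uncork() batch chunks through clearBuffer(). Below is a minimal consumer-side sketch of that contract, not part of the patch; the NullWritable subclass is hypothetical, and readable-stream@2.0.6 semantics as vendored here are assumed.

var Writable = require('readable-stream').Writable;
var util = require('util');

// Hypothetical subclass that discards its input; only _write() is required.
function NullWritable(options) {
  Writable.call(this, options);
}
util.inherits(NullWritable, Writable);

NullWritable.prototype._write = function (chunk, encoding, cb) {
  // Defer the callback to simulate asynchronous I/O; invoking cb() is what
  // lets onwrite()/clearBuffer() above drain any queued chunks.
  setImmediate(cb);
};

var ws = new NullWritable({ highWaterMark: 4 });

ws.cork();              // state.corked++: subsequent chunks are buffered
var ok = ws.write('a'); // returns false once state.length >= highWaterMark
ws.write('b');
ws.uncork();            // state.corked--: clearBuffer() flushes the queue

if (!ok) {
  // A false return set needDrain, so wait for 'drain' before writing more.
  ws.once('drain', function () { ws.end('c'); });
} else {
  ws.end('c');          // end() fully uncorks and arms 'finish'
}

ws.on('finish', function () { console.log('all writes flushed'); });

As the deleted code's own comment notes, afterWrite is deferred to the next tick for synchronous writes precisely so that 'drain' cannot fire before the producer sees write()'s false return value and has a chance to attach a 'drain' listener.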