author | Florian Dold <florian.dold@gmail.com> | 2016-11-03 01:33:53 +0100
committer | Florian Dold <florian.dold@gmail.com> | 2016-11-03 01:33:53 +0100
commit | d1291f67551c58168af43698a359cb5ddfd266b0 (patch)
tree | 55a13ed29fe1915e3f42f1b1b7038dafa2e975a7 /node_modules/merge-stream
parent | d0a0695fb5d34996850723f7d4b1b59c3df909c2 (diff)
node_modules
Diffstat (limited to 'node_modules/merge-stream')
27 files changed, 12 insertions, 4447 deletions
diff --git a/node_modules/merge-stream/node_modules/isarray/.npmignore b/node_modules/merge-stream/node_modules/isarray/.npmignore deleted file mode 100644 index 3c3629e64..000000000 --- a/node_modules/merge-stream/node_modules/isarray/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/node_modules/merge-stream/node_modules/isarray/.travis.yml b/node_modules/merge-stream/node_modules/isarray/.travis.yml deleted file mode 100644 index cc4dba29d..000000000 --- a/node_modules/merge-stream/node_modules/isarray/.travis.yml +++ /dev/null @@ -1,4 +0,0 @@ -language: node_js -node_js: - - "0.8" - - "0.10" diff --git a/node_modules/merge-stream/node_modules/isarray/Makefile b/node_modules/merge-stream/node_modules/isarray/Makefile deleted file mode 100644 index 787d56e1e..000000000 --- a/node_modules/merge-stream/node_modules/isarray/Makefile +++ /dev/null @@ -1,6 +0,0 @@ - -test: - @node_modules/.bin/tape test.js - -.PHONY: test - diff --git a/node_modules/merge-stream/node_modules/isarray/README.md b/node_modules/merge-stream/node_modules/isarray/README.md deleted file mode 100644 index 16d2c59c6..000000000 --- a/node_modules/merge-stream/node_modules/isarray/README.md +++ /dev/null @@ -1,60 +0,0 @@ - -# isarray - -`Array#isArray` for older browsers. - -[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) -[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) - -[![browser support](https://ci.testling.com/juliangruber/isarray.png) -](https://ci.testling.com/juliangruber/isarray) - -## Usage - -```js -var isArray = require('isarray'); - -console.log(isArray([])); // => true -console.log(isArray({})); // => false -``` - -## Installation - -With [npm](http://npmjs.org) do - -```bash -$ npm install isarray -``` - -Then bundle for the browser with -[browserify](https://github.com/substack/browserify). - -With [component](http://component.io) do - -```bash -$ component install juliangruber/isarray -``` - -## License - -(MIT) - -Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/node_modules/merge-stream/node_modules/isarray/component.json b/node_modules/merge-stream/node_modules/isarray/component.json deleted file mode 100644 index 9e31b6838..000000000 --- a/node_modules/merge-stream/node_modules/isarray/component.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "name" : "isarray", - "description" : "Array#isArray for older browsers", - "version" : "0.0.1", - "repository" : "juliangruber/isarray", - "homepage": "https://github.com/juliangruber/isarray", - "main" : "index.js", - "scripts" : [ - "index.js" - ], - "dependencies" : {}, - "keywords": ["browser","isarray","array"], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT" -} diff --git a/node_modules/merge-stream/node_modules/isarray/index.js b/node_modules/merge-stream/node_modules/isarray/index.js deleted file mode 100644 index a57f63495..000000000 --- a/node_modules/merge-stream/node_modules/isarray/index.js +++ /dev/null @@ -1,5 +0,0 @@ -var toString = {}.toString; - -module.exports = Array.isArray || function (arr) { - return toString.call(arr) == '[object Array]'; -}; diff --git a/node_modules/merge-stream/node_modules/isarray/package.json b/node_modules/merge-stream/node_modules/isarray/package.json deleted file mode 100644 index 17c938a7d..000000000 --- a/node_modules/merge-stream/node_modules/isarray/package.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "/home/dold/repos/taler/wallet-webex/node_modules/merge-stream/node_modules/readable-stream" - ] - ], - "_from": "isarray@>=1.0.0 <1.1.0", - "_id": "isarray@1.0.0", - "_inCache": true, - "_location": "/merge-stream/isarray", - "_nodeVersion": "5.1.0", - "_npmUser": { - "name": "juliangruber", - "email": "julian@juliangruber.com" - }, - "_npmVersion": "3.3.12", - "_phantomChildren": {}, - "_requested": { - "raw": "isarray@~1.0.0", - "scope": null, - "escapedName": "isarray", - "name": "isarray", - "rawSpec": "~1.0.0", - "spec": ">=1.0.0 <1.1.0", - "type": "range" - }, - "_requiredBy": [ - "/merge-stream/readable-stream" - ], - "_resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "_shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "_shrinkwrap": null, - "_spec": "isarray@~1.0.0", - "_where": "/home/dold/repos/taler/wallet-webex/node_modules/merge-stream/node_modules/readable-stream", - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "bugs": { - "url": "https://github.com/juliangruber/isarray/issues" - }, - "dependencies": {}, - "description": "Array#isArray for older browsers", - "devDependencies": { - "tape": "~2.13.4" - }, - "directories": {}, - "dist": { - "shasum": "bb935d48582cba168c06834957a54a3e07124f11", - "tarball": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" - }, - "gitHead": "2a23a281f369e9ae06394c0fb4d2381355a6ba33", - "homepage": "https://github.com/juliangruber/isarray", - "keywords": [ - "browser", - "isarray", - "array" - ], - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "juliangruber", - "email": "julian@juliangruber.com" - } - ], - "name": "isarray", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/isarray.git" - }, - "scripts": { - 
"test": "tape test.js" - }, - "testling": { - "files": "test.js", - "browsers": [ - "ie/8..latest", - "firefox/17..latest", - "firefox/nightly", - "chrome/22..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - }, - "version": "1.0.0" -} diff --git a/node_modules/merge-stream/node_modules/isarray/test.js b/node_modules/merge-stream/node_modules/isarray/test.js deleted file mode 100644 index e0c3444d8..000000000 --- a/node_modules/merge-stream/node_modules/isarray/test.js +++ /dev/null @@ -1,20 +0,0 @@ -var isArray = require('./'); -var test = require('tape'); - -test('is array', function(t){ - t.ok(isArray([])); - t.notOk(isArray({})); - t.notOk(isArray(null)); - t.notOk(isArray(false)); - - var obj = {}; - obj[0] = true; - t.notOk(isArray(obj)); - - var arr = []; - arr.foo = 'bar'; - t.ok(isArray(arr)); - - t.end(); -}); - diff --git a/node_modules/merge-stream/node_modules/readable-stream/.npmignore b/node_modules/merge-stream/node_modules/readable-stream/.npmignore deleted file mode 100644 index 265ff739e..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/.npmignore +++ /dev/null @@ -1,8 +0,0 @@ -build/ -test/ -examples/ -fs.js -zlib.js -.zuul.yml -.nyc_output -coverage diff --git a/node_modules/merge-stream/node_modules/readable-stream/.travis.yml b/node_modules/merge-stream/node_modules/readable-stream/.travis.yml deleted file mode 100644 index 84504c98f..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/.travis.yml +++ /dev/null @@ -1,49 +0,0 @@ -sudo: false -language: node_js -before_install: - - npm install -g npm@2 - - npm install -g npm -notifications: - email: false -matrix: - fast_finish: true - include: - - node_js: '0.8' - env: TASK=test - - node_js: '0.10' - env: TASK=test - - node_js: '0.11' - env: TASK=test - - node_js: '0.12' - env: TASK=test - - node_js: 1 - env: TASK=test - - node_js: 2 - env: TASK=test - - node_js: 3 - env: TASK=test - - node_js: 4 - env: TASK=test - - node_js: 5 - env: TASK=test - - node_js: 6 - env: TASK=test - - node_js: 5 - env: TASK=browser BROWSER_NAME=android BROWSER_VERSION="4.0..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=ie BROWSER_VERSION="9..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=opera BROWSER_VERSION="11..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=chrome BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=firefox BROWSER_VERSION="-3..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=safari BROWSER_VERSION="5..latest" - - node_js: 5 - env: TASK=browser BROWSER_NAME=microsoftedge BROWSER_VERSION=latest -script: "npm run $TASK" -env: - global: - - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= - - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/merge-stream/node_modules/readable-stream/LICENSE b/node_modules/merge-stream/node_modules/readable-stream/LICENSE deleted file mode 100644 index e3d4e695a..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/node_modules/merge-stream/node_modules/readable-stream/README.md b/node_modules/merge-stream/node_modules/readable-stream/README.md deleted file mode 100644 index 9fb4feaaa..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/README.md +++ /dev/null @@ -1,36 +0,0 @@ -# readable-stream - -***Node-core v6.3.1 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) - - -[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) -[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) - - -[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) - -```bash -npm install --save readable-stream -``` - -***Node-core streams for userland*** - -This package is a mirror of the Streams2 and Streams3 implementations in -Node-core, including [documentation](doc/stream.md). - -If you want to guarantee a stable streams base, regardless of what version of -Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). - -As of version 2.0.0 **readable-stream** uses semantic versioning. 
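To make the README's recommendation concrete, here is a minimal sketch (not part of the original README) of depending on `readable-stream` instead of Node-core's `stream`; the upper-casing transform is a hypothetical example:

```js
const Transform = require('readable-stream').Transform;

// Pinning stream semantics to the readable-stream package rather than
// to whatever Node version happens to run the code.
const upperCase = new Transform({
  transform(chunk, encoding, callback) {
    // Pass the transformed chunk downstream and signal completion.
    callback(null, chunk.toString().toUpperCase());
  }
});

process.stdin.pipe(upperCase).pipe(process.stdout);
```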
- -# Streams WG Team Members - -* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> - - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B -* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> - - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 -* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> - - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D -* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> -* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> -* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> diff --git a/node_modules/merge-stream/node_modules/readable-stream/doc/stream.md b/node_modules/merge-stream/node_modules/readable-stream/doc/stream.md deleted file mode 100644 index fc269c8e3..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/doc/stream.md +++ /dev/null @@ -1,2015 +0,0 @@ -# Stream - - Stability: 2 - Stable - -A stream is an abstract interface for working with streaming data in Node.js. -The `stream` module provides a base API that makes it easy to build objects -that implement the stream interface. - -There are many stream objects provided by Node.js. For instance, a -[request to an HTTP server][http-incoming-message] and [`process.stdout`][] -are both stream instances. - -Streams can be readable, writable, or both. All streams are instances of -[`EventEmitter`][]. - -The `stream` module can be accessed using: - -```js -const stream = require('stream'); -``` - -While it is important for all Node.js users to understand how streams work, -the `stream` module itself is most useful for developers who are creating new -types of stream instances. Developers who are primarily *consuming* stream -objects will rarely (if ever) have need to use the `stream` module directly. - -## Organization of this document - -This document is divided into two primary sections and a third section for -additional notes. The first section explains the elements of the stream API that -are required to *use* streams within an application. The second section explains -the elements of the API that are required to *implement* new types of streams. - -## Types of Streams - -There are four fundamental stream types within Node.js: - -* [Readable][] - streams from which data can be read (for example - [`fs.createReadStream()`][]). -* [Writable][] - streams to which data can be written (for example - [`fs.createWriteStream()`][]). -* [Duplex][] - streams that are both Readable and Writable (for example - [`net.Socket`][]). -* [Transform][] - Duplex streams that can modify or transform the data as it - is written and read (for example [`zlib.createDeflate()`][]). - -### Object Mode - -All streams created by Node.js APIs operate exclusively on strings and `Buffer` -objects. It is possible, however, for stream implementations to work with other -types of JavaScript values (with the exception of `null` which serves a special -purpose within streams). Such streams are considered to operate in "object -mode". - -Stream instances are switched into object mode using the `objectMode` option -when the stream is created. Attempting to switch an existing stream into -object mode is not safe.
- -### Buffering - -<!--type=misc--> - -Both [Writable][] and [Readable][] streams will store data in an internal -buffer that can be retrieved using `writable._writableState.getBuffer()` or -`readable._readableState.buffer`, respectively. - -The amount of data potentially buffered depends on the `highWaterMark` option -passed into the streams constructor. For normal streams, the `highWaterMark` -option specifies a total number of bytes. For streams operating in object mode, -the `highWaterMark` specifies a total number of objects. - -Data is buffered in Readable streams when the implementation calls -[`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not -call [`stream.read()`][stream-read], the data will sit in the internal -queue until it is consumed. - -Once the total size of the internal read buffer reaches the threshold specified -by `highWaterMark`, the stream will temporarily stop reading data from the -underlying resource until the data currently buffered can be consumed (that is, -the stream will stop calling the internal `readable._read()` method that is -used to fill the read buffer). - -Data is buffered in Writable streams when the -[`writable.write(chunk)`][stream-write] method is called repeatedly. While the -total size of the internal write buffer is below the threshold set by -`highWaterMark`, calls to `writable.write()` will return `true`. Once the -the size of the internal buffer reaches or exceeds the `highWaterMark`, `false` -will be returned. - -A key goal of the `stream` API, and in particular the [`stream.pipe()`] method, -is to limit the buffering of data to acceptable levels such that sources and -destinations of differing speeds will not overwhelm the available memory. - -Because [Duplex][] and [Transform][] streams are both Readable and Writable, -each maintain *two* separate internal buffers used for reading and writing, -allowing each side to operate independently of the other while maintaining an -appropriate and efficient flow of data. For example, [`net.Socket`][] instances -are [Duplex][] streams whose Readable side allows consumption of data received -*from* the socket and whose Writable side allows writing data *to* the socket. -Because data may be written to the socket at a faster or slower rate than data -is received, it is important each side operate (and buffer) independently of -the other. - -## API for Stream Consumers - -<!--type=misc--> - -Almost all Node.js applications, no matter how simple, use streams in some -manner. The following is an example of using streams in a Node.js application -that implements an HTTP server: - -```js -const http = require('http'); - -const server = http.createServer( (req, res) => { - // req is an http.IncomingMessage, which is a Readable Stream - // res is an http.ServerResponse, which is a Writable Stream - - let body = ''; - // Get the data as utf8 strings. - // If an encoding is not set, Buffer objects will be received. - req.setEncoding('utf8'); - - // Readable streams emit 'data' events once a listener is added - req.on('data', (chunk) => { - body += chunk; - }); - - // the end event indicates that the entire body has been received - req.on('end', () => { - try { - const data = JSON.parse(body); - } catch (er) { - // uh oh! bad json! 
- res.statusCode = 400; - return res.end(`error: ${er.message}`); - } - - // write back something interesting to the user: - res.write(typeof data); - res.end(); - }); -}); - -server.listen(1337); - -// $ curl localhost:1337 -d '{}' -// object -// $ curl localhost:1337 -d '"foo"' -// string -// $ curl localhost:1337 -d 'not json' -// error: Unexpected token o -``` - -[Writable][] streams (such as `res` in the example) expose methods such as -`write()` and `end()` that are used to write data onto the stream. - -[Readable][] streams use the [`EventEmitter`][] API for notifying application -code when data is available to be read off the stream. That available data can -be read from the stream in multiple ways. - -Both [Writable][] and [Readable][] streams use the [`EventEmitter`][] API in -various ways to communicate the current state of the stream. - -[Duplex][] and [Transform][] streams are both [Writable][] and [Readable][]. - -Applications that are either writing data to or consuming data from a stream -are not required to implement the stream interfaces directly and will generally -have no reason to call `require('stream')`. - -Developers wishing to implement new types of streams should refer to the -section [API for Stream Implementers][]. - -### Writable Streams - -Writable streams are an abstraction for a *destination* to which data is -written. - -Examples of [Writable][] streams include: - -* [HTTP requests, on the client][] -* [HTTP responses, on the server][] -* [fs write streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdin][] -* [`process.stdout`][], [`process.stderr`][] - -*Note*: Some of these examples are actually [Duplex][] streams that implement -the [Writable][] interface. - -All [Writable][] streams implement the interface defined by the -`stream.Writable` class. - -While specific instances of [Writable][] streams may differ in various ways, -all Writable streams follow the same fundamental usage pattern as illustrated -in the example below: - -```js -const myStream = getWritableStreamSomehow(); -myStream.write('some data'); -myStream.write('some more data'); -myStream.end('done writing data'); -``` - -#### Class: stream.Writable -<!-- YAML -added: v0.9.4 ---> - -<!--type=class--> - -##### Event: 'close' -<!-- YAML -added: v0.9.4 ---> - -The `'close'` event is emitted when the stream and any of its underlying -resources (a file descriptor, for example) have been closed. The event indicates -that no more events will be emitted, and no further computation will occur. - -Not all Writable streams will emit the `'close'` event. - -##### Event: 'drain' -<!-- YAML -added: v0.9.4 ---> - -If a call to [`stream.write(chunk)`][stream-write] returns `false`, the -`'drain'` event will be emitted when it is appropriate to resume writing data -to the stream. - -```js -// Write the data to the supplied writable stream one million times. -// Be attentive to back-pressure. -function writeOneMillionTimes(writer, data, encoding, callback) { - let i = 1000000; - write(); - function write() { - var ok = true; - do { - i--; - if (i === 0) { - // last time! - writer.write(data, encoding, callback); - } else { - // see if we should continue, or wait - // don't pass the callback, because we're not done yet. - ok = writer.write(data, encoding); - } - } while (i > 0 && ok); - if (i > 0) { - // had to stop early! 
- // write some more once it drains - writer.once('drain', write); - } - } -} -``` - -##### Event: 'error' -<!-- YAML -added: v0.9.4 ---> - -* {Error} - -The `'error'` event is emitted if an error occurred while writing or piping -data. The listener callback is passed a single `Error` argument when called. - -*Note*: The stream is not closed when the `'error'` event is emitted. - -##### Event: 'finish' -<!-- YAML -added: v0.9.4 ---> - -The `'finish'` event is emitted after the [`stream.end()`][stream-end] method -has been called, and all data has been flushed to the underlying system. - -```js -const writer = getWritableStreamSomehow(); -for (var i = 0; i < 100; i++) { - writer.write(`hello, #${i}!\n`); -} -writer.end('This is the end\n'); -writer.on('finish', () => { - console.error('All writes are now complete.'); -}); -``` - -##### Event: 'pipe' -<!-- YAML -added: v0.9.4 ---> - -* `src` {stream.Readable} source stream that is piping to this writable - -The `'pipe'` event is emitted when the [`stream.pipe()`][] method is called on -a readable stream, adding this writable to its set of destinations. - -```js -const writer = getWritableStreamSomehow(); -const reader = getReadableStreamSomehow(); -writer.on('pipe', (src) => { - console.error('something is piping into the writer'); - assert.equal(src, reader); -}); -reader.pipe(writer); -``` - -##### Event: 'unpipe' -<!-- YAML -added: v0.9.4 ---> - -* `src` {[Readable][] Stream} The source stream that - [unpiped][`stream.unpipe()`] this writable - -The `'unpipe'` event is emitted when the [`stream.unpipe()`][] method is called -on a [Readable][] stream, removing this [Writable][] from its set of -destinations. - -```js -const writer = getWritableStreamSomehow(); -const reader = getReadableStreamSomehow(); -writer.on('unpipe', (src) => { - console.error('Something has stopped piping into the writer.'); - assert.equal(src, reader); -}); -reader.pipe(writer); -reader.unpipe(writer); -``` - -##### writable.cork() -<!-- YAML -added: v0.11.2 ---> - -The `writable.cork()` method forces all written data to be buffered in memory. -The buffered data will be flushed when either the [`stream.uncork()`][] or -[`stream.end()`][stream-end] methods are called. - -The primary intent of `writable.cork()` is to avoid a situation where writing -many small chunks of data to a stream does not cause a backup in the internal -buffer that would have an adverse impact on performance. In such situations, -implementations that implement the `writable._writev()` method can perform -buffered writes in a more optimized manner. - -##### writable.end([chunk][, encoding][, callback]) -<!-- YAML -added: v0.9.4 ---> - -* `chunk` {String|Buffer|any} Optional data to write. For streams not operating - in object mode, `chunk` must be a string or a `Buffer`. For object mode - streams, `chunk` may be any JavaScript value other than `null`. -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Optional callback for when the stream is finished - -Calling the `writable.end()` method signals that no more data will be written -to the [Writable][]. The optional `chunk` and `encoding` arguments allow one -final additional chunk of data to be written immediately before closing the -stream. If provided, the optional `callback` function is attached as a listener -for the [`'finish'`][] event. - -Calling the [`stream.write()`][stream-write] method after calling -[`stream.end()`][stream-end] will raise an error.
- -```js -// write 'hello, ' and then end with 'world!' -const file = fs.createWriteStream('example.txt'); -file.write('hello, '); -file.end('world!'); -// writing more now is not allowed! -``` - -##### writable.setDefaultEncoding(encoding) -<!-- YAML -added: v0.11.15 ---> - -* `encoding` {String} The new default encoding -* Return: `this` - -The `writable.setDefaultEncoding()` method sets the default `encoding` for a -[Writable][] stream. - -##### writable.uncork() -<!-- YAML -added: v0.11.2 ---> - -The `writable.uncork()` method flushes all data buffered since -[`stream.cork()`][] was called. - -When using `writable.cork()` and `writable.uncork()` to manage the buffering -of writes to a stream, it is recommended that calls to `writable.uncork()` be -deferred using `process.nextTick()`. Doing so allows batching of all -`writable.write()` calls that occur within a given Node.js event loop phase. - -```js -stream.cork(); -stream.write('some '); -stream.write('data '); -process.nextTick(() => stream.uncork()); -``` - -If the `writable.cork()` method is called multiple times on a stream, the same -number of calls to `writable.uncork()` must be called to flush the buffered -data. - -``` -stream.cork(); -stream.write('some '); -stream.cork(); -stream.write('data '); -process.nextTick(() => { - stream.uncork(); - // The data will not be flushed until uncork() is called a second time. - stream.uncork(); -}); -``` - -##### writable.write(chunk[, encoding][, callback]) -<!-- YAML -added: v0.9.4 ---> - -* `chunk` {String|Buffer} The data to write -* `encoding` {String} The encoding, if `chunk` is a String -* `callback` {Function} Callback for when this chunk of data is flushed -* Returns: {Boolean} `false` if the stream wishes for the calling code to - wait for the `'drain'` event to be emitted before continuing to write - additional data; otherwise `true`. - -The `writable.write()` method writes some data to the stream, and calls the -supplied `callback` once the data has been fully handled. If an error -occurs, the `callback` *may or may not* be called with the error as its -first argument. To reliably detect write errors, add a listener for the -`'error'` event. - -The return value indicates whether the written `chunk` was buffered internally -and the buffer has exceeded the `highWaterMark` configured when the stream was -created. If `false` is returned, further attempts to write data to the stream -should be paused until the `'drain'` event is emitted. - -A Writable stream in object mode will always ignore the `encoding` argument. - -### Readable Streams - -Readable streams are an abstraction for a *source* from which data is -consumed. - -Examples of Readable streams include: - -* [HTTP responses, on the client][http-incoming-message] -* [HTTP requests, on the server][http-incoming-message] -* [fs read streams][] -* [zlib streams][zlib] -* [crypto streams][crypto] -* [TCP sockets][] -* [child process stdout and stderr][] -* [`process.stdin`][] - -All [Readable][] streams implement the interface defined by the -`stream.Readable` class. - -#### Two Modes - -Readable streams effectively operate in one of two modes: flowing and paused. - -When in flowing mode, data is read from the underlying system automatically -and provided to an application as quickly as possible using events via the -[`EventEmitter`][] interface. - -In paused mode, the [`stream.read()`][stream-read] method must be called -explicitly to read chunks of data from the stream. 
- -All [Readable][] streams begin in paused mode but can be switched to flowing -mode in one of the following ways: - -* Adding a [`'data'`][] event handler. -* Calling the [`stream.resume()`][stream-resume] method. -* Calling the [`stream.pipe()`][] method to send the data to a [Writable][]. - -The Readable can switch back to paused mode using one of the following: - -* If there are no pipe destinations, by calling the - [`stream.pause()`][stream-pause] method. -* If there are pipe destinations, by removing any [`'data'`][] event - handlers, and removing all pipe destinations by calling the - [`stream.unpipe()`][] method. - -The important concept to remember is that a Readable will not generate data -until a mechanism for either consuming or ignoring that data is provided. If -the consuming mechanism is disabled or taken away, the Readable will *attempt* -to stop generating the data. - -*Note*: For backwards compatibility reasons, removing [`'data'`][] event -handlers will **not** automatically pause the stream. Also, if there are piped -destinations, then calling [`stream.pause()`][stream-pause] will not guarantee -that the stream will *remain* paused once those destinations drain and ask for -more data. - -*Note*: If a [Readable][] is switched into flowing mode and there are no -consumers available handle the data, that data will be lost. This can occur, -for instance, when the `readable.resume()` method is called without a listener -attached to the `'data'` event, or when a `'data'` event handler is removed -from the stream. - -#### Three States - -The "two modes" of operation for a Readable stream are a simplified abstraction -for the more complicated internal state management that is happening within the -Readable stream implementation. - -Specifically, at any given point in time, every Readable is in one of three -possible states: - -* `readable._readableState.flowing = null` -* `readable._readableState.flowing = false` -* `readable._readableState.flowing = true` - -When `readable._readableState.flowing` is `null`, no mechanism for consuming the -streams data is provided so the stream will not generate its data. - -Attaching a listener for the `'data'` event, calling the `readable.pipe()` -method, or calling the `readable.resume()` method will switch -`readable._readableState.flowing` to `true`, causing the Readable to begin -actively emitting events as data is generated. - -Calling `readable.pause()`, `readable.unpipe()`, or receiving "back pressure" -will cause the `readable._readableState.flowing` to be set as `false`, -temporarily halting the flowing of events but *not* halting the generation of -data. - -While `readable._readableState.flowing` is `false`, data may be accumulating -within the streams internal buffer. - -#### Choose One - -The Readable stream API evolved across multiple Node.js versions and provides -multiple methods of consuming stream data. In general, developers should choose -*one* of the methods of consuming data and *should never* use multiple methods -to consume data from a single stream. - -Use of the `readable.pipe()` method is recommended for most users as it has been -implemented to provide the easiest way of consuming stream data. Developers that -require more fine-grained control over the transfer and generation of data can -use the [`EventEmitter`][] and `readable.pause()`/`readable.resume()` APIs. 
- -#### Class: stream.Readable -<!-- YAML -added: v0.9.4 ---> - -<!--type=class--> - -##### Event: 'close' -<!-- YAML -added: v0.9.4 ---> - -The `'close'` event is emitted when the stream and any of its underlying -resources (a file descriptor, for example) have been closed. The event indicates -that no more events will be emitted, and no further computation will occur. - -Not all [Readable][] streams will emit the `'close'` event. - -##### Event: 'data' -<!-- YAML -added: v0.9.4 ---> - -* `chunk` {Buffer|String|any} The chunk of data. For streams that are not - operating in object mode, the chunk will be either a string or `Buffer`. - For streams that are in object mode, the chunk can be any JavaScript value - other than `null`. - -The `'data'` event is emitted whenever the stream is relinquishing ownership of -a chunk of data to a consumer. This may occur whenever the stream is switched -in flowing mode by calling `readable.pipe()`, `readable.resume()`, or by -attaching a listener callback to the `'data'` event. The `'data'` event will -also be emitted whenever the `readable.read()` method is called and a chunk of -data is available to be returned. - -Attaching a `'data'` event listener to a stream that has not been explicitly -paused will switch the stream into flowing mode. Data will then be passed as -soon as it is available. - -The listener callback will be passed the chunk of data as a string if a default -encoding has been specified for the stream using the -`readable.setEncoding()` method; otherwise the data will be passed as a -`Buffer`. - -```js -const readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log(`Received ${chunk.length} bytes of data.`); -}); -``` - -##### Event: 'end' -<!-- YAML -added: v0.9.4 ---> - -The `'end'` event is emitted when there is no more data to be consumed from -the stream. - -*Note*: The `'end'` event **will not be emitted** unless the data is -completely consumed. This can be accomplished by switching the stream into -flowing mode, or by calling [`stream.read()`][stream-read] repeatedly until -all data has been consumed. - -```js -const readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log(`Received ${chunk.length} bytes of data.`); -}); -readable.on('end', () => { - console.log('There will be no more data.'); -}); -``` - -##### Event: 'error' -<!-- YAML -added: v0.9.4 ---> - -* {Error} - -The `'error'` event may be emitted by a Readable implementation at any time. -Typically, this may occur if the underlying stream in unable to generate data -due to an underlying internal failure, or when a stream implementation attempts -to push an invalid chunk of data. - -The listener callback will be passed a single `Error` object. - -##### Event: 'readable' -<!-- YAML -added: v0.9.4 ---> - -The `'readable'` event is emitted when there is data available to be read from -the stream. In some cases, attaching a listener for the `'readable'` event will -cause some amount of data to be read into an internal buffer. - -```javascript -const readable = getReadableStreamSomehow(); -readable.on('readable', () => { - // there is some data to read now -}); -``` -The `'readable'` event will also be emitted once the end of the stream data -has been reached but before the `'end'` event is emitted. - -Effectively, the `'readable'` event indicates that the stream has new -information: either new data is available or the end of the stream has been -reached. 
In the former case, [`stream.read()`][stream-read] will return the -available data. In the latter case, [`stream.read()`][stream-read] will return -`null`. For instance, in the following example, `foo.txt` is an empty file: - -```js -const fs = require('fs'); -const rr = fs.createReadStream('foo.txt'); -rr.on('readable', () => { - console.log('readable:', rr.read()); -}); -rr.on('end', () => { - console.log('end'); -}); -``` - -The output of running this script is: - -``` -$ node test.js -readable: null -end -``` - -*Note*: In general, the `readable.pipe()` and `'data'` event mechanisms are -preferred over the use of the `'readable'` event. - -##### readable.isPaused() -<!-- -added: v0.11.14 ---> - -* Return: {Boolean} - -The `readable.isPaused()` method returns the current operating state of the -Readable. This is used primarily by the mechanism that underlies the -`readable.pipe()` method. In most typical cases, there will be no reason to -use this method directly. - -```js -const readable = new stream.Readable - -readable.isPaused() // === false -readable.pause() -readable.isPaused() // === true -readable.resume() -readable.isPaused() // === false -``` - -##### readable.pause() -<!-- YAML -added: v0.9.4 ---> - -* Return: `this` - -The `readable.pause()` method will cause a stream in flowing mode to stop -emitting [`'data'`][] events, switching out of flowing mode. Any data that -becomes available will remain in the internal buffer. - -```js -const readable = getReadableStreamSomehow(); -readable.on('data', (chunk) => { - console.log(`Received ${chunk.length} bytes of data.`); - readable.pause(); - console.log('There will be no additional data for 1 second.'); - setTimeout(() => { - console.log('Now data will start flowing again.'); - readable.resume(); - }, 1000); -}); -``` - -##### readable.pipe(destination[, options]) -<!-- YAML -added: v0.9.4 ---> - -* `destination` {stream.Writable} The destination for writing data -* `options` {Object} Pipe options - * `end` {Boolean} End the writer when the reader ends. Defaults to `true`. - -The `readable.pipe()` method attaches a [Writable][] stream to the `readable`, -causing it to switch automatically into flowing mode and push all of its data -to the attached [Writable][]. The flow of data will be automatically managed so -that the destination Writable stream is not overwhelmed by a faster Readable -stream. - -The following example pipes all of the data from the `readable` into a file -named `file.txt`: - -```js -const readable = getReadableStreamSomehow(); -const writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt' -readable.pipe(writable); -``` -It is possible to attach multiple Writable streams to a single Readable stream. - -The `readable.pipe()` method returns a reference to the *destination* stream -making it possible to set up chains of piped streams: - -```js -const r = fs.createReadStream('file.txt'); -const z = zlib.createGzip(); -const w = fs.createWriteStream('file.txt.gz'); -r.pipe(z).pipe(w); -``` - -By default, [`stream.end()`][stream-end] is called on the destination Writable -stream when the source Readable stream emits [`'end'`][], so that the -destination is no longer writable. 
To disable this default behavior, the `end` -option can be passed as `false`, causing the destination stream to remain open, -as illustrated in the following example: - -```js -reader.pipe(writer, { end: false }); -reader.on('end', () => { - writer.end('Goodbye\n'); -}); -``` - -One important caveat is that if the Readable stream emits an error during -processing, the Writable destination *is not closed* automatically. If an -error occurs, it will be necessary to *manually* close each stream in order -to prevent memory leaks. - -*Note*: The [`process.stderr`][] and [`process.stdout`][] Writable streams are -never closed until the Node.js process exits, regardless of the specified -options. - -##### readable.read([size]) -<!-- YAML -added: v0.9.4 ---> - -* `size` {Number} Optional argument to specify how much data to read. -* Return {String|Buffer|Null} - -The `readable.read()` method pulls some data out of the internal buffer and -returns it. If no data available to be read, `null` is returned. By default, -the data will be returned as a `Buffer` object unless an encoding has been -specified using the `readable.setEncoding()` method or the stream is operating -in object mode. - -The optional `size` argument specifies a specific number of bytes to read. If -`size` bytes are not available to be read, `null` will be returned *unless* -the stream has ended, in which case all of the data remaining in the internal -buffer will be returned (*even if it exceeds `size` bytes*). - -If the `size` argument is not specified, all of the data contained in the -internal buffer will be returned. - -The `readable.read()` method should only be called on Readable streams operating -in paused mode. In flowing mode, `readable.read()` is called automatically until -the internal buffer is fully drained. - -```js -const readable = getReadableStreamSomehow(); -readable.on('readable', () => { - var chunk; - while (null !== (chunk = readable.read())) { - console.log(`Received ${chunk.length} bytes of data.`); - } -}); -``` - -In general, it is recommended that developers avoid the use of the `'readable'` -event and the `readable.read()` method in favor of using either -`readable.pipe()` or the `'data'` event. - -A Readable stream in object mode will always return a single item from -a call to [`readable.read(size)`][stream-read], regardless of the value of the -`size` argument. - -*Note:* If the `readable.read()` method returns a chunk of data, a `'data'` -event will also be emitted. - -*Note*: Calling [`stream.read([size])`][stream-read] after the [`'end'`][] -event has been emitted will return `null`. No runtime error will be raised. - -##### readable.resume() -<!-- YAML -added: v0.9.4 ---> - -* Return: `this` - -The `readable.resume()` method causes an explicitly paused Readable stream to -resume emitting [`'data'`][] events, switching the stream into flowing mode. - -The `readable.resume()` method can be used to fully consume the data from a -stream without actually processing any of that data as illustrated in the -following example: - -```js -getReadableStreamSomehow() - .resume() - .on('end', () => { - console.log('Reached the end, but did not read anything.'); - }); -``` - -##### readable.setEncoding(encoding) -<!-- YAML -added: v0.9.4 ---> - -* `encoding` {String} The encoding to use. -* Return: `this` - -The `readable.setEncoding()` method sets the default character encoding for -data read from the Readable stream. 
- -Setting an encoding causes the stream data -to be returned as string of the specified encoding rather than as `Buffer` -objects. For instance, calling `readable.setEncoding('utf8')` will cause the -output data will be interpreted as UTF-8 data, and passed as strings. Calling -`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal -string format. - -The Readable stream will properly handle multi-byte characters delivered through -the stream that would otherwise become improperly decoded if simply pulled from -the stream as `Buffer` objects. - -Encoding can be disabled by calling `readable.setEncoding(null)`. This approach -is useful when working with binary data or with large multi-byte strings spread -out over multiple chunks. - -```js -const readable = getReadableStreamSomehow(); -readable.setEncoding('utf8'); -readable.on('data', (chunk) => { - assert.equal(typeof chunk, 'string'); - console.log('got %d characters of string data', chunk.length); -}); -``` - -##### readable.unpipe([destination]) -<!-- YAML -added: v0.9.4 ---> - -* `destination` {stream.Writable} Optional specific stream to unpipe - -The `readable.unpipe()` method detaches a Writable stream previously attached -using the [`stream.pipe()`][] method. - -If the `destination` is not specified, then *all* pipes are detached. - -If the `destination` is specified, but no pipe is set up for it, then -the method does nothing. - -```js -const readable = getReadableStreamSomehow(); -const writable = fs.createWriteStream('file.txt'); -// All the data from readable goes into 'file.txt', -// but only for the first second -readable.pipe(writable); -setTimeout(() => { - console.log('Stop writing to file.txt'); - readable.unpipe(writable); - console.log('Manually close the file stream'); - writable.end(); -}, 1000); -``` - -##### readable.unshift(chunk) -<!-- YAML -added: v0.9.11 ---> - -* `chunk` {Buffer|String} Chunk of data to unshift onto the read queue - -The `readable.unshift()` method pushes a chunk of data back into the internal -buffer. This is useful in certain situations where a stream is being consumed by -code that needs to "un-consume" some amount of data that it has optimistically -pulled out of the source, so that the data can be passed on to some other party. - -*Note*: The `stream.unshift(chunk)` method cannot be called after the -[`'end'`][] event has been emitted or a runtime error will be thrown. - -Developers using `stream.unshift()` often should consider switching to -use of a [Transform][] stream instead. See the [API for Stream Implementers][] -section for more information. - -```js -// Pull off a header delimited by \n\n -// use unshift() if we get too much -// Call the callback with (error, header, stream) -const StringDecoder = require('string_decoder').StringDecoder; -function parseHeader(stream, callback) { - stream.on('error', callback); - stream.on('readable', onReadable); - const decoder = new StringDecoder('utf8'); - var header = ''; - function onReadable() { - var chunk; - while (null !== (chunk = stream.read())) { - var str = decoder.write(chunk); - if (str.match(/\n\n/)) { - // found the header boundary - var split = str.split(/\n\n/); - header += split.shift(); - const remaining = split.join('\n\n'); - const buf = Buffer.from(remaining, 'utf8'); - if (buf.length) - stream.unshift(buf); - stream.removeListener('error', callback); - stream.removeListener('readable', onReadable); - // now the body of the message can be read from the stream. 
- callback(null, header, stream); - } else { - // still reading the header. - header += str; - } - } - } -} -``` - -*Note*: Unlike [`stream.push(chunk)`][stream-push], `stream.unshift(chunk)` -will not end the reading process by resetting the internal reading state of the -stream. This can cause unexpected results if `readable.unshift()` is called -during a read (i.e. from within a [`stream._read()`][stream-_read] -implementation on a custom stream). Following the call to `readable.unshift()` -with an immediate [`stream.push('')`][stream-push] will reset the reading state -appropriately, however it is best to simply avoid calling `readable.unshift()` -while in the process of performing a read. - -##### readable.wrap(stream) -<!-- YAML -added: v0.9.4 ---> - -* `stream` {Stream} An "old style" readable stream - -Versions of Node.js prior to v0.10 had streams that did not implement the -entire `stream` module API as it is currently defined. (See [Compatibility][] -for more information.) - -When using an older Node.js library that emits [`'data'`][] events and has a -[`stream.pause()`][stream-pause] method that is advisory only, the -`readable.wrap()` method can be used to create a [Readable][] stream that uses -the old stream as its data source. - -It will rarely be necessary to use `readable.wrap()` but the method has been -provided as a convenience for interacting with older Node.js applications and -libraries. - -For example: - -```js -const OldReader = require('./old-api-module.js').OldReader; -const Readable = require('stream').Readable; -const oreader = new OldReader; -const myReader = new Readable().wrap(oreader); - -myReader.on('readable', () => { - myReader.read(); // etc. -}); -``` - -### Duplex and Transform Streams - -#### Class: stream.Duplex -<!-- YAML -added: v0.9.4 ---> - -<!--type=class--> - -Duplex streams are streams that implement both the [Readable][] and -[Writable][] interfaces. - -Examples of Duplex streams include: - -* [TCP sockets][] -* [zlib streams][zlib] -* [crypto streams][crypto] - -#### Class: stream.Transform -<!-- YAML -added: v0.9.4 ---> - -<!--type=class--> - -Transform streams are [Duplex][] streams where the output is in some way -related to the input. Like all [Duplex][] streams, Transform streams -implement both the [Readable][] and [Writable][] interfaces. - -Examples of Transform streams include: - -* [zlib streams][zlib] -* [crypto streams][crypto] - - -## API for Stream Implementers - -<!--type=misc--> - -The `stream` module API has been designed to make it possible to easily -implement streams using JavaScript's prototypical inheritance model. 
- -First, a stream developer would declare a new JavaScript class that extends one -of the four basic stream classes (`stream.Writable`, `stream.Readable`, -`stream.Duplex`, or `stream.Transform`), making sure the call the appropriate -parent class constructor: - -```js -const Writable = require('stream').Writable; - -class MyWritable extends Writable { - constructor(options) { - super(options); - } -} -``` - -The new stream class must then implement one or more specific methods, depending -on the type of stream being created, as detailed in the chart below: - -<table> - <thead> - <tr> - <th> - <p>Use-case</p> - </th> - <th> - <p>Class</p> - </th> - <th> - <p>Method(s) to implement</p> - </th> - </tr> - </thead> - <tr> - <td> - <p>Reading only</p> - </td> - <td> - <p>[Readable](#stream_class_stream_readable)</p> - </td> - <td> - <p><code>[_read][stream-_read]</code></p> - </td> - </tr> - <tr> - <td> - <p>Writing only</p> - </td> - <td> - <p>[Writable](#stream_class_stream_writable)</p> - </td> - <td> - <p><code>[_write][stream-_write]</code>, <code>[_writev][stream-_writev]</code></p> - </td> - </tr> - <tr> - <td> - <p>Reading and writing</p> - </td> - <td> - <p>[Duplex](#stream_class_stream_duplex)</p> - </td> - <td> - <p><code>[_read][stream-_read]</code>, <code>[_write][stream-_write]</code>, <code>[_writev][stream-_writev]</code></p> - </td> - </tr> - <tr> - <td> - <p>Operate on written data, then read the result</p> - </td> - <td> - <p>[Transform](#stream_class_stream_transform)</p> - </td> - <td> - <p><code>[_transform][stream-_transform]</code>, <code>[_flush][stream-_flush]</code></p> - </td> - </tr> -</table> - -*Note*: The implementation code for a stream should *never* call the "public" -methods of a stream that are intended for use by consumers (as described in -the [API for Stream Consumers][] section). Doing so may lead to adverse -side effects in application code consuming the stream. - -### Simplified Construction - -For many simple cases, it is possible to construct a stream without relying on -inheritance. This can be accomplished by directly creating instances of the -`stream.Writable`, `stream.Readable`, `stream.Duplex` or `stream.Transform` -objects and passing appropriate methods as constructor options. - -For example: - -```js -const Writable = require('stream').Writable; - -const myWritable = new Writable({ - write(chunk, encoding, callback) { - // ... - } -}); -``` - -### Implementing a Writable Stream - -The `stream.Writable` class is extended to implement a [Writable][] stream. - -Custom Writable streams *must* call the `new stream.Writable([options])` -constructor and implement the `writable._write()` method. The -`writable._writev()` method *may* also be implemented. - -#### Constructor: new stream.Writable([options]) - -* `options` {Object} - * `highWaterMark` {Number} Buffer level when - [`stream.write()`][stream-write] starts returning `false`. Defaults to - `16384` (16kb), or `16` for `objectMode` streams. - * `decodeStrings` {Boolean} Whether or not to decode strings into - Buffers before passing them to [`stream._write()`][stream-_write]. - Defaults to `true` - * `objectMode` {Boolean} Whether or not the - [`stream.write(anyObj)`][stream-write] is a valid operation. When set, - it becomes possible to write JavaScript values other than string or - `Buffer` if supported by the stream implementation. Defaults to `false` - * `write` {Function} Implementation for the - [`stream._write()`][stream-_write] method. 
- * `writev` {Function} Implementation for the - [`stream._writev()`][stream-_writev] method. - -For example: - -```js -const Writable = require('stream').Writable; - -class MyWritable extends Writable { - constructor(options) { - // Calls the stream.Writable() constructor - super(options); - } -} -``` - -Or, when using pre-ES6 style constructors: - -```js -const Writable = require('stream').Writable; -const util = require('util'); - -function MyWritable(options) { - if (!(this instanceof MyWritable)) - return new MyWritable(options); - Writable.call(this, options); -} -util.inherits(MyWritable, Writable); -``` - -Or, using the Simplified Constructor approach: - -```js -const Writable = require('stream').Writable; - -const myWritable = new Writable({ - write(chunk, encoding, callback) { - // ... - }, - writev(chunks, callback) { - // ... - } -}); -``` - -#### writable.\_write(chunk, encoding, callback) - -* `chunk` {Buffer|String} The chunk to be written. Will **always** - be a buffer unless the `decodeStrings` option was set to `false`. -* `encoding` {String} If the chunk is a string, then `encoding` is the - character encoding of that string. If chunk is a `Buffer`, or if the - stream is operating in object mode, `encoding` may be ignored. -* `callback` {Function} Call this function (optionally with an error - argument) when processing is complete for the supplied chunk. - -All Writable stream implementations must provide a -[`writable._write()`][stream-_write] method to send data to the underlying -resource. - -*Note*: [Transform][] streams provide their own implementation of the -[`writable._write()`][stream-_write]. - -*Note*: **This function MUST NOT be called by application code directly.** It -should be implemented by child classes, and called only by the internal Writable -class methods only. - -The `callback` method must be called to signal either that the write completed -successfully or failed with an error. The first argument passed to the -`callback` must be the `Error` object if the call failed or `null` if the -write succeeded. - -It is important to note that all calls to `writable.write()` that occur between -the time `writable._write()` is called and the `callback` is called will cause -the written data to be buffered. Once the `callback` is invoked, the stream will -emit a `'drain'` event. If a stream implementation is capable of processing -multiple chunks of data at once, the `writable._writev()` method should be -implemented. - -If the `decodeStrings` property is set in the constructor options, then -`chunk` may be a string rather than a Buffer, and `encoding` will -indicate the character encoding of the string. This is to support -implementations that have an optimized handling for certain string -data encodings. If the `decodeStrings` property is explicitly set to `false`, -the `encoding` argument can be safely ignored, and `chunk` will always be a -`Buffer`. - -The `writable._write()` method is prefixed with an underscore because it is -internal to the class that defines it, and should never be called directly by -user programs. - -#### writable.\_writev(chunks, callback) - -* `chunks` {Array} The chunks to be written. Each chunk has following - format: `{ chunk: ..., encoding: ... }`. -* `callback` {Function} A callback function (optionally with an error - argument) to be invoked when processing is complete for the supplied chunks. 
#### Errors While Writing

It is recommended that errors occurring during the processing of the
`writable._write()` and `writable._writev()` methods are reported by invoking
the callback and passing the error as the first argument. This will cause an
`'error'` event to be emitted by the Writable. Throwing an `Error` from within
`writable._write()` can result in unexpected and inconsistent behavior depending
on how the stream is being used. Using the callback ensures consistent and
predictable handling of errors.

```js
const Writable = require('stream').Writable;

const myWritable = new Writable({
  write(chunk, encoding, callback) {
    if (chunk.toString().indexOf('a') >= 0) {
      callback(new Error('chunk is invalid'));
    } else {
      callback();
    }
  }
});
```

#### An Example Writable Stream

The following illustrates a rather simplistic (and somewhat pointless) custom
Writable stream implementation. While this specific Writable stream instance
is not of any real particular usefulness, the example illustrates each of the
required elements of a custom [Writable][] stream instance:

```js
const Writable = require('stream').Writable;

class MyWritable extends Writable {
  constructor(options) {
    super(options);
  }

  _write(chunk, encoding, callback) {
    if (chunk.toString().indexOf('a') >= 0) {
      callback(new Error('chunk is invalid'));
    } else {
      callback();
    }
  }
}
```

### Implementing a Readable Stream

The `stream.Readable` class is extended to implement a [Readable][] stream.

Custom Readable streams *must* call the `new stream.Readable([options])`
constructor and implement the `readable._read()` method.

#### new stream.Readable([options])

* `options` {Object}
  * `highWaterMark` {Number} The maximum number of bytes to store in
    the internal buffer before ceasing to read from the underlying
    resource. Defaults to `16384` (16 KB), or `16` for `objectMode` streams.
  * `encoding` {String} If specified, then buffers will be decoded to
    strings using the specified encoding. Defaults to `null`.
  * `objectMode` {Boolean} Whether this stream should behave
    as a stream of objects, meaning that [`stream.read(n)`][stream-read]
    returns a single value instead of a `Buffer` of size `n`. Defaults to
    `false`.
  * `read` {Function} Implementation for the [`stream._read()`][stream-_read]
    method.
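When `objectMode` is enabled, the stream queues arbitrary JavaScript values,
and `highWaterMark` counts values rather than bytes. A minimal sketch (the
pushed objects are arbitrary; the noop `read()` is enough here because the data
is pushed in from outside):

```js
const Readable = require('stream').Readable;

// In object mode each pushed value counts as one item, regardless of size.
const objectReadable = new Readable({
  objectMode: true,
  read() {}
});

objectReadable.push({ id: 1 });
objectReadable.push({ id: 2 });
objectReadable.push(null); // signal EOF

objectReadable.on('data', (obj) => console.log(obj.id));
// Prints: 1, then 2
```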
For example, when extending the class directly:

```js
const Readable = require('stream').Readable;

class MyReadable extends Readable {
  constructor(options) {
    // Calls the stream.Readable(options) constructor
    super(options);
  }
}
```

Or, when using pre-ES6 style constructors:

```js
const Readable = require('stream').Readable;
const util = require('util');

function MyReadable(options) {
  if (!(this instanceof MyReadable))
    return new MyReadable(options);
  Readable.call(this, options);
}
util.inherits(MyReadable, Readable);
```

Or, using the Simplified Constructor approach:

```js
const Readable = require('stream').Readable;

const myReadable = new Readable({
  read(size) {
    // ...
  }
});
```

#### readable.\_read(size)

* `size` {Number} Number of bytes to read asynchronously

*Note*: **This function MUST NOT be called by application code directly.** It
should be implemented by child classes and called only by the internal Readable
class methods.

All Readable stream implementations must provide an implementation of the
`readable._read()` method to fetch data from the underlying resource.

When `readable._read()` is called, if data is available from the resource, the
implementation should begin pushing that data into the read queue using the
[`this.push(dataChunk)`][stream-push] method. `_read()` should continue reading
from the resource and pushing data until `readable.push()` returns `false`. Only
when `_read()` is called again after it has stopped should it resume pushing
additional data onto the queue.

*Note*: Once the `readable._read()` method has been called, it will not be
called again until the [`readable.push()`][stream-push] method is called.

The `size` argument is advisory. Implementations where a "read" is a
single operation that returns data can use the `size` argument to determine how
much data to fetch. Other implementations may ignore this argument and simply
provide data whenever it becomes available. There is no need to "wait" until
`size` bytes are available before calling [`stream.push(chunk)`][stream-push].

The `readable._read()` method is prefixed with an underscore because it is
internal to the class that defines it, and should never be called directly by
user programs.

#### readable.push(chunk[, encoding])

* `chunk` {Buffer|Null|String} Chunk of data to push into the read queue
* `encoding` {String} Encoding of string chunks. Must be a valid
  `Buffer` encoding, such as `'utf8'` or `'ascii'`
* Returns {Boolean} `true` if additional chunks of data may continue to be
  pushed; `false` otherwise.

When `chunk` is a `Buffer` or string, the `chunk` of data will be added to the
internal queue for users of the stream to consume. Passing `chunk` as `null`
signals the end of the stream (EOF), after which no more data can be pushed.

When the Readable is operating in paused mode, the data added with
`readable.push()` can be read out by calling the
[`readable.read()`][stream-read] method when the [`'readable'`][] event is
emitted.

When the Readable is operating in flowing mode, the data added with
`readable.push()` will be delivered by emitting a `'data'` event.
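As a small sketch of the paused-mode case (assuming only the Simplified
Constructor shown earlier; the noop `read()` is sufficient because the data is
pushed in from outside):

```js
const Readable = require('stream').Readable;

const myReadable = new Readable({
  read(size) {} // noop: data is pushed in from outside, below
});

// In paused mode, pushed data sits in the queue until read() is called.
myReadable.on('readable', () => {
  let chunk;
  while ((chunk = myReadable.read()) !== null) {
    console.log(chunk.toString());
  }
});

myReadable.push('some data'); // Prints: some data
myReadable.push(null);        // EOF
```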
The `readable.push()` method is designed to be as flexible as possible. For
example, when wrapping a lower-level source that provides some form of
pause/resume mechanism and a data callback, the low-level source can be wrapped
by the custom Readable instance as illustrated in the following example:

```js
const Readable = require('stream').Readable;

// source is an object with readStop() and readStart() methods,
// an `ondata` member that gets called when it has data, and
// an `onend` member that gets called when the data is over.

class SourceWrapper extends Readable {
  constructor(options) {
    super(options);

    this._source = getLowlevelSourceObject();

    // Every time there's data, push it into the internal buffer.
    this._source.ondata = (chunk) => {
      // if push() returns false, then stop reading from source
      if (!this.push(chunk))
        this._source.readStop();
    };

    // When the source ends, push the EOF-signaling `null` chunk
    this._source.onend = () => {
      this.push(null);
    };
  }
  // _read() will be called when the stream wants to pull more data in.
  // The advisory size argument is ignored in this case.
  _read(size) {
    this._source.readStart();
  }
}
```

*Note*: The `readable.push()` method is intended to be called only by Readable
implementers, and only from within the `readable._read()` method.

#### Errors While Reading

It is recommended that errors occurring during the processing of the
`readable._read()` method are emitted using the `'error'` event rather than
being thrown. Throwing an `Error` from within `readable._read()` can result in
unexpected and inconsistent behavior depending on whether the stream is
operating in flowing or paused mode. Using the `'error'` event ensures
consistent and predictable handling of errors.

```js
const Readable = require('stream').Readable;

const myReadable = new Readable({
  read(size) {
    if (checkSomeErrorCondition()) {
      process.nextTick(() => this.emit('error', new Error('error condition detected')));
      return;
    }
    // do some work
  }
});
```

#### An Example Counting Stream

<!--type=example-->

The following is a basic example of a Readable stream that emits the numerals
from 1 to 1,000,000 in ascending order, and then ends.

```js
const Readable = require('stream').Readable;

class Counter extends Readable {
  constructor(opt) {
    super(opt);
    this._max = 1000000;
    this._index = 1;
  }

  _read() {
    const i = this._index++;
    if (i > this._max) {
      this.push(null);
    } else {
      const str = '' + i;
      const buf = Buffer.from(str, 'ascii');
      this.push(buf);
    }
  }
}
```

### Implementing a Duplex Stream

A [Duplex][] stream is one that implements both [Readable][] and [Writable][],
such as a TCP socket connection.

Because JavaScript does not have support for multiple inheritance, the
`stream.Duplex` class is extended to implement a [Duplex][] stream (as opposed
to extending the `stream.Readable` *and* `stream.Writable` classes).

*Note*: The `stream.Duplex` class prototypically inherits from `stream.Readable`
and parasitically from `stream.Writable`.

Custom Duplex streams *must* call the `new stream.Duplex([options])`
constructor and implement *both* the `readable._read()` and
`writable._write()` methods.

#### new stream.Duplex(options)

* `options` {Object} Passed to both the Writable and Readable
  constructors. Also has the following fields:
  * `allowHalfOpen` {Boolean} Defaults to `true`. If set to `false`, then
    the stream will automatically end the readable side when the
    writable side ends, and vice versa.
  * `readableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode`
    for the readable side of the stream. Has no effect if `objectMode`
    is `true`.
  * `writableObjectMode` {Boolean} Defaults to `false`. Sets `objectMode`
    for the writable side of the stream. Has no effect if `objectMode`
    is `true`.

For example:

```js
const Duplex = require('stream').Duplex;

class MyDuplex extends Duplex {
  constructor(options) {
    super(options);
  }
}
```

Or, when using pre-ES6 style constructors:

```js
const Duplex = require('stream').Duplex;
const util = require('util');

function MyDuplex(options) {
  if (!(this instanceof MyDuplex))
    return new MyDuplex(options);
  Duplex.call(this, options);
}
util.inherits(MyDuplex, Duplex);
```

Or, using the Simplified Constructor approach:

```js
const Duplex = require('stream').Duplex;

const myDuplex = new Duplex({
  read(size) {
    // ...
  },
  write(chunk, encoding, callback) {
    // ...
  }
});
```

#### An Example Duplex Stream

The following illustrates a simple example of a Duplex stream that wraps a
hypothetical lower-level source object to which data can be written, and
from which data can be read, albeit using an API that is not compatible with
Node.js streams.

```js
const Duplex = require('stream').Duplex;
const kSource = Symbol('source');

class MyDuplex extends Duplex {
  constructor(source, options) {
    super(options);
    this[kSource] = source;
  }

  _write(chunk, encoding, callback) {
    // The underlying source only deals with strings
    if (Buffer.isBuffer(chunk))
      chunk = chunk.toString(encoding);
    this[kSource].writeSomeData(chunk, encoding);
    callback();
  }

  _read(size) {
    this[kSource].fetchSomeData(size, (data, encoding) => {
      this.push(Buffer.from(data, encoding));
    });
  }
}
```

The most important aspect of a Duplex stream is that the Readable and Writable
sides operate independently of one another despite co-existing within a single
object instance.

#### Object Mode Duplex Streams

For Duplex streams, `objectMode` can be set exclusively for either the Readable
or Writable side using the `readableObjectMode` and `writableObjectMode` options
respectively.

In the following example, for instance, a new Transform stream (which is a
type of [Duplex][] stream) is created that has an object mode Writable side
accepting JavaScript numbers that are converted to hexadecimal strings on
the Readable side.

```js
const Transform = require('stream').Transform;

// All Transform streams are also Duplex streams
const myTransform = new Transform({
  writableObjectMode: true,

  transform(chunk, encoding, callback) {
    // Coerce the chunk to a number if necessary
    chunk |= 0;

    // Transform the chunk into something else.
    const data = chunk.toString(16);

    // Push the data onto the readable queue.
    callback(null, '0'.repeat(data.length % 2) + data);
  }
});

myTransform.setEncoding('ascii');
myTransform.on('data', (chunk) => console.log(chunk));

myTransform.write(1);
// Prints: 01
myTransform.write(10);
// Prints: 0a
myTransform.write(100);
// Prints: 64
```

### Implementing a Transform Stream

A [Transform][] stream is a [Duplex][] stream where the output is computed
in some way from the input. Examples include [zlib][] streams or [crypto][]
streams that compress, encrypt, or decrypt data.
*Note*: There is no requirement that the output be the same size as the input,
the same number of chunks, or arrive at the same time. For example, a
Hash stream will only ever have a single chunk of output, which is
provided when the input is ended. A `zlib` stream will produce output
that is either much smaller or much larger than its input.

The `stream.Transform` class is extended to implement a [Transform][] stream.

The `stream.Transform` class prototypically inherits from `stream.Duplex` and
implements its own versions of the `writable._write()` and `readable._read()`
methods. Custom Transform implementations *must* implement the
[`transform._transform()`][stream-_transform] method and *may* also implement
the [`transform._flush()`][stream-_flush] method.

*Note*: Care must be taken when using Transform streams in that data written
to the stream can cause the Writable side of the stream to become paused if
the output on the Readable side is not consumed.

#### new stream.Transform([options])

* `options` {Object} Passed to both the Writable and Readable
  constructors. Also has the following fields:
  * `transform` {Function} Implementation for the
    [`stream._transform()`][stream-_transform] method.
  * `flush` {Function} Implementation for the [`stream._flush()`][stream-_flush]
    method.

For example:

```js
const Transform = require('stream').Transform;

class MyTransform extends Transform {
  constructor(options) {
    super(options);
  }
}
```

Or, when using pre-ES6 style constructors:

```js
const Transform = require('stream').Transform;
const util = require('util');

function MyTransform(options) {
  if (!(this instanceof MyTransform))
    return new MyTransform(options);
  Transform.call(this, options);
}
util.inherits(MyTransform, Transform);
```

Or, using the Simplified Constructor approach:

```js
const Transform = require('stream').Transform;

const myTransform = new Transform({
  transform(chunk, encoding, callback) {
    // ...
  }
});
```

#### Events: 'finish' and 'end'

The [`'finish'`][] and [`'end'`][] events are from the `stream.Writable`
and `stream.Readable` classes, respectively. The `'finish'` event is emitted
after [`stream.end()`][stream-end] is called and all chunks have been processed
by [`stream._transform()`][stream-_transform]. The `'end'` event is emitted
after all data has been output, which occurs after the callback in
[`transform._flush()`][stream-_flush] has been called.

#### transform.\_flush(callback)

* `callback` {Function} A callback function (optionally with an error
  argument) to be called when remaining data has been flushed.

*Note*: **This function MUST NOT be called by application code directly.** It
should be implemented by child classes and called only by the internal Readable
class methods.

In some cases, a transform operation may need to emit an additional bit of
data at the end of the stream. For example, a `zlib` compression stream will
store an amount of internal state used to optimally compress the output. When
the stream ends, however, that additional data needs to be flushed so that the
compressed data will be complete.

Custom [Transform][] implementations *may* implement the `transform._flush()`
method. This will be called when there is no more written data to be consumed,
but before the [`'end'`][] event is emitted signaling the end of the
[Readable][] stream.

Within the `transform._flush()` implementation, the `readable.push()` method
may be called zero or more times, as appropriate. The `callback` function must
be called when the flush operation is complete.

The `transform._flush()` method is prefixed with an underscore because it is
internal to the class that defines it, and should never be called directly by
user programs.
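As a sketch of that pattern, the following hypothetical framing Transform
passes data through in `_transform()` and emits a trailer recording the total
byte count from `_flush()` (the trailer format is invented purely for
illustration):

```js
const Transform = require('stream').Transform;

// Pass data through unchanged, then append a one-line trailer
// recording the total number of bytes seen.
const withTrailer = new Transform({
  transform(chunk, encoding, callback) {
    this.byteCount = (this.byteCount || 0) + chunk.length;
    callback(null, chunk); // forward the chunk as-is
  },
  flush(callback) {
    this.push('\n-- ' + this.byteCount + ' bytes --\n');
    callback();
  }
});

withTrailer.pipe(process.stdout);
withTrailer.end('hello');
// Writes "hello" followed by "-- 5 bytes --" to stdout
```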
#### transform.\_transform(chunk, encoding, callback)

* `chunk` {Buffer|String} The chunk to be transformed. Will **always**
  be a `Buffer` unless the `decodeStrings` option was set to `false`.
* `encoding` {String} If the chunk is a string, then this is the
  encoding type. If the chunk is a `Buffer`, then this is the special
  value `'buffer'`; ignore it in that case.
* `callback` {Function} A callback function (optionally with an error
  argument and data) to be called after the supplied `chunk` has been
  processed.

*Note*: **This function MUST NOT be called by application code directly.** It
should be implemented by child classes and called only by the internal Readable
class methods.

All Transform stream implementations must provide a `_transform()`
method to accept input and produce output. The `transform._transform()`
implementation handles the bytes being written, computes an output, then passes
that output off to the readable portion using the `readable.push()` method.

The `transform.push()` method may be called zero or more times to generate
output from a single input chunk, depending on how much is to be output
as a result of the chunk.

It is possible that no output is generated from any given chunk of input data.

The `callback` function must be called only when the current chunk is completely
consumed. The first argument passed to the `callback` must be an `Error` object
if an error occurred while processing the input, or `null` otherwise. If a
second argument is passed to the `callback`, it will be forwarded on to the
`readable.push()` method. In other words, the following are equivalent:

```js
transform.prototype._transform = function (data, encoding, callback) {
  this.push(data);
  callback();
};

transform.prototype._transform = function (data, encoding, callback) {
  callback(null, data);
};
```

The `transform._transform()` method is prefixed with an underscore because it
is internal to the class that defines it, and should never be called directly by
user programs.
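Putting the pieces together, the following minimal sketch is a complete
Transform that upper-cases its input. It is illustrative only, and ignores
multi-byte characters that may be split across chunk boundaries:

```js
const Transform = require('stream').Transform;

class UppercaseTransform extends Transform {
  _transform(chunk, encoding, callback) {
    // One output chunk per input chunk; no error case to report here.
    callback(null, chunk.toString().toUpperCase());
  }
}

process.stdin.pipe(new UppercaseTransform()).pipe(process.stdout);
```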
#### Class: stream.PassThrough

The `stream.PassThrough` class is a trivial implementation of a [Transform][]
stream that simply passes the input bytes across to the output. Its purpose is
primarily for examples and testing, but there are some use cases where
`stream.PassThrough` is useful as a building block for novel sorts of streams.

## Additional Notes

<!--type=misc-->

### Compatibility with Older Node.js Versions

<!--type=misc-->

In versions of Node.js prior to v0.10, the Readable stream interface was
simpler, but also less powerful and less useful.

* Rather than waiting for calls to the [`stream.read()`][stream-read] method,
  [`'data'`][] events would begin emitting immediately. Applications that
  needed to perform some amount of work to decide how to handle data
  were required to store read data into buffers so the data would not be lost.
* The [`stream.pause()`][stream-pause] method was advisory, rather than
  guaranteed. This meant that it was still necessary to be prepared to receive
  [`'data'`][] events *even when the stream was in a paused state*.

In Node.js v0.10, the [Readable][] class was added. For backwards compatibility
with older Node.js programs, Readable streams switch into "flowing mode" when a
[`'data'`][] event handler is added, or when the
[`stream.resume()`][stream-resume] method is called. The effect is that, even
when not using the new [`stream.read()`][stream-read] method and
[`'readable'`][] event, it is no longer necessary to worry about losing
[`'data'`][] chunks.

While most applications will continue to function normally, this introduces an
edge case in the following conditions:

* No [`'data'`][] event listener is added.
* The [`stream.resume()`][stream-resume] method is never called.
* The stream is not piped to any writable destination.

For example, consider the following code:

```js
// WARNING!  BROKEN!
net.createServer((socket) => {

  // we add an 'end' listener, but never consume the data
  socket.on('end', () => {
    // It will never get here.
    socket.end('The message was received but was not processed.\n');
  });

}).listen(1337);
```

In versions of Node.js prior to v0.10, the incoming message data would be
simply discarded. However, in Node.js v0.10 and beyond, the socket remains
paused forever.

The workaround in this situation is to call the
[`stream.resume()`][stream-resume] method to begin the flow of data:

```js
// Workaround
net.createServer((socket) => {

  socket.on('end', () => {
    socket.end('The message was received but was not processed.\n');
  });

  // start the flow of data, discarding it.
  socket.resume();

}).listen(1337);
```

In addition to new Readable streams switching into flowing mode,
pre-v0.10 style streams can be wrapped in a Readable class using the
[`readable.wrap()`][`stream.wrap()`] method.

### `readable.read(0)`

There are some cases where it is necessary to trigger a refresh of the
underlying readable stream mechanisms without actually consuming any
data. In such cases, it is possible to call `readable.read(0)`, which will
always return `null`.

If the internal read buffer is below the `highWaterMark`, and the
stream is not currently reading, then calling `stream.read(0)` will trigger
a low-level [`stream._read()`][stream-_read] call.

While most applications will almost never need to do this, there are
situations within Node.js where this is done, particularly in the
Readable stream class internals.
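A contrived sketch of that mechanism (the logging `read()` exists only to make
the internal call visible; the printed size is the default `highWaterMark`):

```js
const Readable = require('stream').Readable;

const myReadable = new Readable({
  read(size) {
    console.log('_read called with size', size);
  }
});

// Returns null and, because the buffer is empty, triggers _read().
console.log(myReadable.read(0));
// Prints: _read called with size 16384
//         null
```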
### `readable.push('')`

Use of `readable.push('')` is not recommended.

Pushing a zero-byte string or `Buffer` to a stream that is not in object mode
has an interesting side effect. Because it *is* a call to
[`readable.push()`][stream-push], the call will end the reading process.
However, because the argument is an empty string, no data is added to the
readable buffer, so there is nothing for a user to consume.

[`'data'`]: #stream_event_data
[`'drain'`]: #stream_event_drain
[`'end'`]: #stream_event_end
[`'finish'`]: #stream_event_finish
[`'readable'`]: #stream_event_readable
[`buf.toString(encoding)`]: https://nodejs.org/docs/v6.3.1/api/buffer.html#buffer_buf_tostring_encoding_start_end
[`EventEmitter`]: https://nodejs.org/docs/v6.3.1/api/events.html#events_class_eventemitter
[`process.stderr`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stderr
[`process.stdin`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdin
[`process.stdout`]: https://nodejs.org/docs/v6.3.1/api/process.html#process_process_stdout
[`stream.cork()`]: #stream_writable_cork
[`stream.pipe()`]: #stream_readable_pipe_destination_options
[`stream.uncork()`]: #stream_writable_uncork
[`stream.unpipe()`]: #stream_readable_unpipe_destination
[`stream.wrap()`]: #stream_readable_wrap_stream
[`tls.CryptoStream`]: https://nodejs.org/docs/v6.3.1/api/tls.html#tls_class_cryptostream
[API for Stream Consumers]: #stream_api_for_stream_consumers
[API for Stream Implementers]: #stream_api_for_stream_implementers
[child process stdin]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdin
[child process stdout and stderr]: https://nodejs.org/docs/v6.3.1/api/child_process.html#child_process_child_stdout
[Compatibility]: #stream_compatibility_with_older_node_js_versions
[crypto]: crypto.html
[Duplex]: #stream_class_stream_duplex
[fs read streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_readstream
[fs write streams]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_class_fs_writestream
[`fs.createReadStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createreadstream_path_options
[`fs.createWriteStream()`]: https://nodejs.org/docs/v6.3.1/api/fs.html#fs_fs_createwritestream_path_options
[`net.Socket`]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket
[`zlib.createDeflate()`]: https://nodejs.org/docs/v6.3.1/api/zlib.html#zlib_zlib_createdeflate_options
[HTTP requests, on the client]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_clientrequest
[HTTP responses, on the server]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_serverresponse
[http-incoming-message]: https://nodejs.org/docs/v6.3.1/api/http.html#http_class_http_incomingmessage
[Object mode]: #stream_object_mode
[Readable]: #stream_class_stream_readable
[SimpleProtocol v2]: #stream_example_simpleprotocol_parser_v2
[stream-_flush]: #stream_transform_flush_callback
[stream-_read]: #stream_readable_read_size_1
[stream-_transform]: #stream_transform_transform_chunk_encoding_callback
[stream-_write]: #stream_writable_write_chunk_encoding_callback_1
[stream-_writev]: #stream_writable_writev_chunks_callback
[stream-end]: #stream_writable_end_chunk_encoding_callback
[stream-pause]: #stream_readable_pause
[stream-push]: #stream_readable_push_chunk_encoding
[stream-read]: #stream_readable_read_size
[stream-resume]: #stream_readable_resume
[stream-write]: #stream_writable_write_chunk_encoding_callback
[TCP sockets]: https://nodejs.org/docs/v6.3.1/api/net.html#net_class_net_socket
[Transform]: #stream_class_stream_transform
[Writable]: #stream_class_stream_writable
[zlib]: zlib.html
diff --git a/node_modules/merge-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/merge-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
deleted file mode 100644
index 83275f192..000000000
--- a/node_modules/merge-stream/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md
+++ /dev/null
@@ -1,60 +0,0 @@
# streams WG Meeting 2015-01-30

## Links

* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg
* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106
* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/

## Agenda

Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting.

* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105)
* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101)
* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102)
* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99)

## Minutes

### adopt a charter

* group: +1's all around

### What versioning scheme should be adopted?

* group: +1's 3.0.0
* domenic+group: pulling in patches from other sources where appropriate
* mikeal: version independently, suggesting versions for io.js
* mikeal+domenic: work with TC to notify in advance of changes

### streamline creation of streams

* sam: streamline creation of streams
* domenic: nice simple solution posted, but we lose the opportunity to change the model; may not be backwards compatible (double-check keys)
  * **action item:** domenic will check

### remove implicit flowing of streams on('data')

* add isFlowing / isPaused
* mikeal: worried that we're documenting polyfill methods -- confuses users
* domenic: more reflective API is probably good, with warning labels for users
* new section for mad scientists (reflective stream access)
* calvin: name the "third state"
* mikeal: maybe borrow the name from whatwg?
* domenic: we're missing the "third state"
* consensus: kind of difficult to name the third state
* mikeal: figure out differences in states / compat
* mathias: always flow on data -- eliminates third state
  * explore what it breaks

**action items:**

* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream)
* ask rod/build for infrastructure
* **chris**: explore the "flow on data" approach
* add isPaused/isFlowing
* add new docs section
* move isPaused to that section

diff --git a/node_modules/merge-stream/node_modules/readable-stream/duplex.js b/node_modules/merge-stream/node_modules/readable-stream/duplex.js
deleted file mode 100644
index ca807af87..000000000
--- a/node_modules/merge-stream/node_modules/readable-stream/duplex.js
+++ /dev/null
@@ -1 +0,0 @@
module.exports = require("./lib/_stream_duplex.js")
diff --git a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_duplex.js
deleted file mode 100644
index 736693b84..000000000
--- a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_duplex.js
+++ /dev/null
@@ -1,75 +0,0 @@
// a duplex stream is just a stream that is both readable and writable.
// Since JS doesn't have multiple prototypal inheritance, this class
// prototypally inherits from Readable, and then parasitically from
// Writable.
'use strict';

/*<replacement>*/
// Fallback for environments without Object.keys().
var objectKeys = Object.keys || function (obj) {
  var keys = [];
  for (var key in obj) {
    keys.push(key);
  }
  return keys;
};
/*</replacement>*/

module.exports = Duplex;

/*<replacement>*/
var processNextTick = require('process-nextick-args');
/*</replacement>*/

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

var Readable = require('./_stream_readable');
var Writable = require('./_stream_writable');

util.inherits(Duplex, Readable);

// Copy any Writable prototype methods that Readable did not already provide.
var keys = objectKeys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
  var method = keys[v];
  if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}

function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  if (options && options.readable === false) this.readable = false;

  if (options && options.writable === false) this.writable = false;

  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;

  this.once('end', onend);
}

// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended) return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  processNextTick(onEndNT, this);
}

function onEndNT(self) {
  self.end();
}

function forEach(xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}
\ No newline at end of file
diff --git a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_passthrough.js
deleted file mode 100644
index d06f71f18..000000000
--- a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_passthrough.js
+++ /dev/null
@@ -1,26 +0,0 @@
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.

'use strict';

module.exports = PassThrough;

var Transform = require('./_stream_transform');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(PassThrough, Transform);

function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);

  Transform.call(this, options);
}

// Identity transform: forward every chunk unchanged.
PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};
\ No newline at end of file diff --git a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_readable.js deleted file mode 100644 index 208cc65f1..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_readable.js +++ /dev/null @@ -1,937 +0,0 @@ -'use strict'; - -module.exports = Readable; - -/*<replacement>*/ -var processNextTick = require('process-nextick-args'); -/*</replacement>*/ - -/*<replacement>*/ -var isArray = require('isarray'); -/*</replacement>*/ - -Readable.ReadableState = ReadableState; - -/*<replacement>*/ -var EE = require('events').EventEmitter; - -var EElistenerCount = function (emitter, type) { - return emitter.listeners(type).length; -}; -/*</replacement>*/ - -/*<replacement>*/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/*</replacement>*/ - -var Buffer = require('buffer').Buffer; -/*<replacement>*/ -var bufferShim = require('buffer-shims'); -/*</replacement>*/ - -/*<replacement>*/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/*</replacement>*/ - -/*<replacement>*/ -var debugUtil = require('util'); -var debug = void 0; -if (debugUtil && debugUtil.debuglog) { - debug = debugUtil.debuglog('stream'); -} else { - debug = function () {}; -} -/*</replacement>*/ - -var BufferList = require('./internal/streams/BufferList'); -var StringDecoder; - -util.inherits(Readable, Stream); - -function prependListener(emitter, event, fn) { - if (typeof emitter.prependListener === 'function') { - return emitter.prependListener(event, fn); - } else { - // This is a hack to make sure that our error handler is attached before any - // userland ones. NEVER DO THIS. This is here only because this code needs - // to continue to work with older versions of Node.js that do not include - // the prependListener() method. The goal is to eventually remove this hack. - if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; - } -} - -var Duplex; -function ReadableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag. Used to make read(n) ignore n and to - // make all the buffer merging and length checks go away - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode; - - // the point at which it stops calling _read() to fill the buffer - // Note: 0 is a valid value, means "don't call _read preemptively ever" - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - // A linked list is used to store data chunks instead of an array because the - // linked list can remove elements from the beginning faster than - // array.shift() - this.buffer = new BufferList(); - this.length = 0; - this.pipes = null; - this.pipesCount = 0; - this.flowing = null; - this.ended = false; - this.endEmitted = false; - this.reading = false; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. 
We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // whenever we return null, then we set a flag to say - // that we're awaiting a 'readable' event emission. - this.needReadable = false; - this.emittedReadable = false; - this.readableListening = false; - this.resumeScheduled = false; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // when piping, we only care about 'readable' events that happen - // after read()ing all the bytes and not getting any pushback. - this.ranOut = false; - - // the number of writers that are awaiting a drain event in .pipe()s - this.awaitDrain = 0; - - // if true, a maybeReadMore has been scheduled - this.readingMore = false; - - this.decoder = null; - this.encoding = null; - if (options.encoding) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this.decoder = new StringDecoder(options.encoding); - this.encoding = options.encoding; - } -} - -var Duplex; -function Readable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - if (!(this instanceof Readable)) return new Readable(options); - - this._readableState = new ReadableState(options, this); - - // legacy - this.readable = true; - - if (options && typeof options.read === 'function') this._read = options.read; - - Stream.call(this); -} - -// Manually shove something into the read() buffer. -// This returns true if the highWaterMark has not been hit yet, -// similar to how Writable.write() returns true if you should -// write() some more. -Readable.prototype.push = function (chunk, encoding) { - var state = this._readableState; - - if (!state.objectMode && typeof chunk === 'string') { - encoding = encoding || state.defaultEncoding; - if (encoding !== state.encoding) { - chunk = bufferShim.from(chunk, encoding); - encoding = ''; - } - } - - return readableAddChunk(this, state, chunk, encoding, false); -}; - -// Unshift should *always* be something directly out of read() -Readable.prototype.unshift = function (chunk) { - var state = this._readableState; - return readableAddChunk(this, state, chunk, '', true); -}; - -Readable.prototype.isPaused = function () { - return this._readableState.flowing === false; -}; - -function readableAddChunk(stream, state, chunk, encoding, addToFront) { - var er = chunkInvalid(state, chunk); - if (er) { - stream.emit('error', er); - } else if (chunk === null) { - state.reading = false; - onEofChunk(stream, state); - } else if (state.objectMode || chunk && chunk.length > 0) { - if (state.ended && !addToFront) { - var e = new Error('stream.push() after EOF'); - stream.emit('error', e); - } else if (state.endEmitted && addToFront) { - var _e = new Error('stream.unshift() after end event'); - stream.emit('error', _e); - } else { - var skipAdd; - if (state.decoder && !addToFront && !encoding) { - chunk = state.decoder.write(chunk); - skipAdd = !state.objectMode && chunk.length === 0; - } - - if (!addToFront) state.reading = false; - - // Don't add to the buffer if we've decoded to an empty string chunk and - // we're not in object mode - if (!skipAdd) { - // if we want the data now, just emit it. 
- if (state.flowing && state.length === 0 && !state.sync) { - stream.emit('data', chunk); - stream.read(0); - } else { - // update the buffer info. - state.length += state.objectMode ? 1 : chunk.length; - if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); - - if (state.needReadable) emitReadable(stream); - } - } - - maybeReadMore(stream, state); - } - } else if (!addToFront) { - state.reading = false; - } - - return needMoreData(state); -} - -// if it's past the high water mark, we can push in some more. -// Also, if we have no data yet, we can stand some -// more bytes. This is to work around cases where hwm=0, -// such as the repl. Also, if the push() triggered a -// readable event, and the user called read(largeNumber) such that -// needReadable was set, then we ought to push more, so that another -// 'readable' event will be triggered. -function needMoreData(state) { - return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); -} - -// backwards compatibility. -Readable.prototype.setEncoding = function (enc) { - if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; - this._readableState.decoder = new StringDecoder(enc); - this._readableState.encoding = enc; - return this; -}; - -// Don't raise the hwm > 8MB -var MAX_HWM = 0x800000; -function computeNewHighWaterMark(n) { - if (n >= MAX_HWM) { - n = MAX_HWM; - } else { - // Get the next highest power of 2 to prevent increasing hwm excessively in - // tiny amounts - n--; - n |= n >>> 1; - n |= n >>> 2; - n |= n >>> 4; - n |= n >>> 8; - n |= n >>> 16; - n++; - } - return n; -} - -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function howMuchToRead(n, state) { - if (n <= 0 || state.length === 0 && state.ended) return 0; - if (state.objectMode) return 1; - if (n !== n) { - // Only flow one buffer at a time - if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; - } - // If we're asking for more than the current hwm, then raise the hwm. - if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); - if (n <= state.length) return n; - // Don't have enough - if (!state.ended) { - state.needReadable = true; - return 0; - } - return state.length; -} - -// you can override either this method, or the async _read(n) below. -Readable.prototype.read = function (n) { - debug('read', n); - n = parseInt(n, 10); - var state = this._readableState; - var nOrig = n; - - if (n !== 0) state.emittedReadable = false; - - // if we're doing read(0) to trigger a readable event, but we - // already have a bunch of data in the buffer, then just trigger - // the 'readable' event and move on. - if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { - debug('read: emitReadable', state.length, state.ended); - if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); - return null; - } - - n = howMuchToRead(n, state); - - // if we've ended, and we're now clear, then finish it up. - if (n === 0 && state.ended) { - if (state.length === 0) endReadable(this); - return null; - } - - // All the actual chunk generation logic needs to be - // *below* the call to _read. 
The reason is that in certain - // synthetic stream cases, such as passthrough streams, _read - // may be a completely synchronous operation which may change - // the state of the read buffer, providing enough data when - // before there was *not* enough. - // - // So, the steps are: - // 1. Figure out what the state of things will be after we do - // a read from the buffer. - // - // 2. If that resulting state will trigger a _read, then call _read. - // Note that this may be asynchronous, or synchronous. Yes, it is - // deeply ugly to write APIs this way, but that still doesn't mean - // that the Readable class should behave improperly, as streams are - // designed to be sync/async agnostic. - // Take note if the _read call is sync or async (ie, if the read call - // has returned yet), so that we know whether or not it's safe to emit - // 'readable' etc. - // - // 3. Actually pull the requested chunks out of the buffer and return. - - // if we need a readable event, then we need to do some reading. - var doRead = state.needReadable; - debug('need readable', doRead); - - // if we currently have less than the highWaterMark, then also read some - if (state.length === 0 || state.length - n < state.highWaterMark) { - doRead = true; - debug('length less than watermark', doRead); - } - - // however, if we've ended, then there's no point, and if we're already - // reading, then it's unnecessary. - if (state.ended || state.reading) { - doRead = false; - debug('reading or ended', doRead); - } else if (doRead) { - debug('do read'); - state.reading = true; - state.sync = true; - // if the length is currently zero, then we *need* a readable event. - if (state.length === 0) state.needReadable = true; - // call internal read method - this._read(state.highWaterMark); - state.sync = false; - // If _read pushed data synchronously, then `reading` will be false, - // and we need to re-evaluate how much data we can return to the user. - if (!state.reading) n = howMuchToRead(nOrig, state); - } - - var ret; - if (n > 0) ret = fromList(n, state);else ret = null; - - if (ret === null) { - state.needReadable = true; - n = 0; - } else { - state.length -= n; - } - - if (state.length === 0) { - // If we have nothing in the buffer, then we want to know - // as soon as we *do* get something into the buffer. - if (!state.ended) state.needReadable = true; - - // If we tried to read() past the EOF, then emit end on the next tick. - if (nOrig !== n && state.ended) endReadable(this); - } - - if (ret !== null) this.emit('data', ret); - - return ret; -}; - -function chunkInvalid(state, chunk) { - var er = null; - if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - return er; -} - -function onEofChunk(stream, state) { - if (state.ended) return; - if (state.decoder) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) { - state.buffer.push(chunk); - state.length += state.objectMode ? 1 : chunk.length; - } - } - state.ended = true; - - // emit 'readable' now to make sure it gets picked up. - emitReadable(stream); -} - -// Don't emit readable right away in sync mode, because this can trigger -// another read() call => stack overflow. This way, it might trigger -// a nextTick recursion warning, but that's not so bad. 
-function emitReadable(stream) { - var state = stream._readableState; - state.needReadable = false; - if (!state.emittedReadable) { - debug('emitReadable', state.flowing); - state.emittedReadable = true; - if (state.sync) processNextTick(emitReadable_, stream);else emitReadable_(stream); - } -} - -function emitReadable_(stream) { - debug('emit readable'); - stream.emit('readable'); - flow(stream); -} - -// at this point, the user has presumably seen the 'readable' event, -// and called read() to consume some data. that may have triggered -// in turn another _read(n) call, in which case reading = true if -// it's in progress. -// However, if we're not ended, or reading, and the length < hwm, -// then go ahead and try to read some more preemptively. -function maybeReadMore(stream, state) { - if (!state.readingMore) { - state.readingMore = true; - processNextTick(maybeReadMore_, stream, state); - } -} - -function maybeReadMore_(stream, state) { - var len = state.length; - while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { - debug('maybeReadMore read 0'); - stream.read(0); - if (len === state.length) - // didn't get any data, stop spinning. - break;else len = state.length; - } - state.readingMore = false; -} - -// abstract method. to be overridden in specific implementation classes. -// call cb(er, data) where data is <= n in length. -// for virtual (non-string, non-buffer) streams, "length" is somewhat -// arbitrary, and perhaps not very meaningful. -Readable.prototype._read = function (n) { - this.emit('error', new Error('not implemented')); -}; - -Readable.prototype.pipe = function (dest, pipeOpts) { - var src = this; - var state = this._readableState; - - switch (state.pipesCount) { - case 0: - state.pipes = dest; - break; - case 1: - state.pipes = [state.pipes, dest]; - break; - default: - state.pipes.push(dest); - break; - } - state.pipesCount += 1; - debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); - - var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; - - var endFn = doEnd ? onend : cleanup; - if (state.endEmitted) processNextTick(endFn);else src.once('end', endFn); - - dest.on('unpipe', onunpipe); - function onunpipe(readable) { - debug('onunpipe'); - if (readable === src) { - cleanup(); - } - } - - function onend() { - debug('onend'); - dest.end(); - } - - // when the dest drains, it reduces the awaitDrain counter - // on the source. This would be more elegant with a .once() - // handler in flow(), but adding and removing repeatedly is - // too slow. - var ondrain = pipeOnDrain(src); - dest.on('drain', ondrain); - - var cleanedUp = false; - function cleanup() { - debug('cleanup'); - // cleanup event handlers once the pipe is broken - dest.removeListener('close', onclose); - dest.removeListener('finish', onfinish); - dest.removeListener('drain', ondrain); - dest.removeListener('error', onerror); - dest.removeListener('unpipe', onunpipe); - src.removeListener('end', onend); - src.removeListener('end', cleanup); - src.removeListener('data', ondata); - - cleanedUp = true; - - // if the reader is waiting for a drain event from this - // specific writer, then it would cause it to never start - // flowing again. - // So, if this is awaiting a drain, then we just call it now. - // If we don't know, then assume that we are waiting for one. 
- if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); - } - - // If the user pushes more data while we're writing to dest then we'll end up - // in ondata again. However, we only want to increase awaitDrain once because - // dest will only emit one 'drain' event for the multiple writes. - // => Introduce a guard on increasing awaitDrain. - var increasedAwaitDrain = false; - src.on('data', ondata); - function ondata(chunk) { - debug('ondata'); - increasedAwaitDrain = false; - var ret = dest.write(chunk); - if (false === ret && !increasedAwaitDrain) { - // If the user unpiped during `dest.write()`, it is possible - // to get stuck in a permanently paused state if that write - // also returned false. - // => Check whether `dest` is still a piping destination. - if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { - debug('false write response, pause', src._readableState.awaitDrain); - src._readableState.awaitDrain++; - increasedAwaitDrain = true; - } - src.pause(); - } - } - - // if the dest has an error, then stop piping into it. - // however, don't suppress the throwing behavior for this. - function onerror(er) { - debug('onerror', er); - unpipe(); - dest.removeListener('error', onerror); - if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); - } - - // Make sure our error handler is attached before userland ones. - prependListener(dest, 'error', onerror); - - // Both close and finish should trigger unpipe, but only once. - function onclose() { - dest.removeListener('finish', onfinish); - unpipe(); - } - dest.once('close', onclose); - function onfinish() { - debug('onfinish'); - dest.removeListener('close', onclose); - unpipe(); - } - dest.once('finish', onfinish); - - function unpipe() { - debug('unpipe'); - src.unpipe(dest); - } - - // tell the dest that it's being piped to - dest.emit('pipe', src); - - // start the flow if it hasn't been started already. - if (!state.flowing) { - debug('pipe resume'); - src.resume(); - } - - return dest; -}; - -function pipeOnDrain(src) { - return function () { - var state = src._readableState; - debug('pipeOnDrain', state.awaitDrain); - if (state.awaitDrain) state.awaitDrain--; - if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { - state.flowing = true; - flow(src); - } - }; -} - -Readable.prototype.unpipe = function (dest) { - var state = this._readableState; - - // if we're not piping anywhere, then do nothing. - if (state.pipesCount === 0) return this; - - // just one destination. most common case. - if (state.pipesCount === 1) { - // passed in one, but it's not the right one. - if (dest && dest !== state.pipes) return this; - - if (!dest) dest = state.pipes; - - // got a match. - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - if (dest) dest.emit('unpipe', this); - return this; - } - - // slow case. multiple pipe destinations. - - if (!dest) { - // remove all. - var dests = state.pipes; - var len = state.pipesCount; - state.pipes = null; - state.pipesCount = 0; - state.flowing = false; - - for (var _i = 0; _i < len; _i++) { - dests[_i].emit('unpipe', this); - }return this; - } - - // try to find the right one. 
- var i = indexOf(state.pipes, dest); - if (i === -1) return this; - - state.pipes.splice(i, 1); - state.pipesCount -= 1; - if (state.pipesCount === 1) state.pipes = state.pipes[0]; - - dest.emit('unpipe', this); - - return this; -}; - -// set up data events if they are asked for -// Ensure readable listeners eventually get something -Readable.prototype.on = function (ev, fn) { - var res = Stream.prototype.on.call(this, ev, fn); - - if (ev === 'data') { - // Start flowing on next tick if stream isn't explicitly paused - if (this._readableState.flowing !== false) this.resume(); - } else if (ev === 'readable') { - var state = this._readableState; - if (!state.endEmitted && !state.readableListening) { - state.readableListening = state.needReadable = true; - state.emittedReadable = false; - if (!state.reading) { - processNextTick(nReadingNextTick, this); - } else if (state.length) { - emitReadable(this, state); - } - } - } - - return res; -}; -Readable.prototype.addListener = Readable.prototype.on; - -function nReadingNextTick(self) { - debug('readable nexttick read 0'); - self.read(0); -} - -// pause() and resume() are remnants of the legacy readable stream API -// If the user uses them, then switch into old mode. -Readable.prototype.resume = function () { - var state = this._readableState; - if (!state.flowing) { - debug('resume'); - state.flowing = true; - resume(this, state); - } - return this; -}; - -function resume(stream, state) { - if (!state.resumeScheduled) { - state.resumeScheduled = true; - processNextTick(resume_, stream, state); - } -} - -function resume_(stream, state) { - if (!state.reading) { - debug('resume read 0'); - stream.read(0); - } - - state.resumeScheduled = false; - state.awaitDrain = 0; - stream.emit('resume'); - flow(stream); - if (state.flowing && !state.reading) stream.read(0); -} - -Readable.prototype.pause = function () { - debug('call pause flowing=%j', this._readableState.flowing); - if (false !== this._readableState.flowing) { - debug('pause'); - this._readableState.flowing = false; - this.emit('pause'); - } - return this; -}; - -function flow(stream) { - var state = stream._readableState; - debug('flow', state.flowing); - while (state.flowing && stream.read() !== null) {} -} - -// wrap an old-style stream as the async data source. -// This is *not* part of the readable stream interface. -// It is an ugly unfortunate mess of history. -Readable.prototype.wrap = function (stream) { - var state = this._readableState; - var paused = false; - - var self = this; - stream.on('end', function () { - debug('wrapped end'); - if (state.decoder && !state.ended) { - var chunk = state.decoder.end(); - if (chunk && chunk.length) self.push(chunk); - } - - self.push(null); - }); - - stream.on('data', function (chunk) { - debug('wrapped data'); - if (state.decoder) chunk = state.decoder.write(chunk); - - // don't skip over falsy values in objectMode - if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; - - var ret = self.push(chunk); - if (!ret) { - paused = true; - stream.pause(); - } - }); - - // proxy all the other methods. - // important when wrapping filters and duplexes. - for (var i in stream) { - if (this[i] === undefined && typeof stream[i] === 'function') { - this[i] = function (method) { - return function () { - return stream[method].apply(stream, arguments); - }; - }(i); - } - } - - // proxy certain important events. 
- var events = ['error', 'close', 'destroy', 'pause', 'resume']; - forEach(events, function (ev) { - stream.on(ev, self.emit.bind(self, ev)); - }); - - // when we try to consume some more bytes, simply unpause the - // underlying stream. - self._read = function (n) { - debug('wrapped _read', n); - if (paused) { - paused = false; - stream.resume(); - } - }; - - return self; -}; - -// exposed for testing purposes only. -Readable._fromList = fromList; - -// Pluck off n bytes from an array of buffers. -// Length is the combined lengths of all the buffers in the list. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromList(n, state) { - // nothing buffered - if (state.length === 0) return null; - - var ret; - if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { - // read it all, truncate the list - if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); - state.buffer.clear(); - } else { - // read part of list - ret = fromListPartial(n, state.buffer, state.decoder); - } - - return ret; -} - -// Extracts only enough buffered data to satisfy the amount requested. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function fromListPartial(n, list, hasStrings) { - var ret; - if (n < list.head.data.length) { - // slice is the same for buffers and strings - ret = list.head.data.slice(0, n); - list.head.data = list.head.data.slice(n); - } else if (n === list.head.data.length) { - // first chunk is a perfect match - ret = list.shift(); - } else { - // result spans more than one buffer - ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); - } - return ret; -} - -// Copies a specified amount of characters from the list of buffered data -// chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBufferString(n, list) { - var p = list.head; - var c = 1; - var ret = p.data; - n -= ret.length; - while (p = p.next) { - var str = p.data; - var nb = n > str.length ? str.length : n; - if (nb === str.length) ret += str;else ret += str.slice(0, n); - n -= nb; - if (n === 0) { - if (nb === str.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = str.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -// Copies a specified amount of bytes from the list of buffered data chunks. -// This function is designed to be inlinable, so please take care when making -// changes to the function body. -function copyFromBuffer(n, list) { - var ret = bufferShim.allocUnsafe(n); - var p = list.head; - var c = 1; - p.data.copy(ret); - n -= p.data.length; - while (p = p.next) { - var buf = p.data; - var nb = n > buf.length ? buf.length : n; - buf.copy(ret, ret.length - n, 0, nb); - n -= nb; - if (n === 0) { - if (nb === buf.length) { - ++c; - if (p.next) list.head = p.next;else list.head = list.tail = null; - } else { - list.head = p; - p.data = buf.slice(nb); - } - break; - } - ++c; - } - list.length -= c; - return ret; -} - -function endReadable(stream) { - var state = stream._readableState; - - // If we get here before consuming all the bytes, then that is a - // bug in node. Should never happen. 
- if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); - - if (!state.endEmitted) { - state.ended = true; - processNextTick(endReadableNT, state, stream); - } -} - -function endReadableNT(state, stream) { - // Check that we didn't get one last unshift. - if (!state.endEmitted && state.length === 0) { - state.endEmitted = true; - stream.readable = false; - stream.emit('end'); - } -} - -function forEach(xs, f) { - for (var i = 0, l = xs.length; i < l; i++) { - f(xs[i], i); - } -} - -function indexOf(xs, x) { - for (var i = 0, l = xs.length; i < l; i++) { - if (xs[i] === x) return i; - } - return -1; -}
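For context, the Readable.prototype.wrap implementation deleted above adapts an old-style (streams1) emitter into a streams2 Readable: it proxies 'error', 'close', 'destroy', 'pause' and 'resume', pushes decoded 'data' chunks, and pauses the source whenever push() signals back-pressure. A minimal usage sketch, assuming readable-stream is installed; getLegacyStream() is a hypothetical stand-in for any legacy 'data'/'end' emitter:

var Readable = require('readable-stream').Readable;

// Hypothetical: any old-style stream that only emits 'data' and 'end'.
var legacy = getLegacyStream();

// wrap() installs the data/end handlers shown above and overrides
// _read so that asking for more bytes simply unpauses the source.
var modern = new Readable().wrap(legacy);

modern.on('readable', function () {
  var chunk;
  while ((chunk = modern.read()) !== null) {
    console.log('read %d bytes', chunk.length);
  }
});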
\ No newline at end of file diff --git a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_transform.js deleted file mode 100644 index dbc996ede..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_transform.js +++ /dev/null @@ -1,180 +0,0 @@ -// a transform stream is a readable/writable stream where you do -// something with the data. Sometimes it's called a "filter", -// but that's not a great name for it, since that implies a thing where -// some bits pass through, and others are simply ignored. (That would -// be a valid example of a transform, of course.) -// -// While the output is causally related to the input, it's not a -// necessarily symmetric or synchronous transformation. For example, -// a zlib stream might take multiple plain-text writes(), and then -// emit a single compressed chunk some time in the future. -// -// Here's how this works: -// -// The Transform stream has all the aspects of the readable and writable -// stream classes. When you write(chunk), that calls _write(chunk,cb) -// internally, and returns false if there's a lot of pending writes -// buffered up. When you call read(), that calls _read(n) until -// there's enough pending readable data buffered up. -// -// In a transform stream, the written data is placed in a buffer. When -// _read(n) is called, it transforms the queued up data, calling the -// buffered _write cb's as it consumes chunks. If consuming a single -// written chunk would result in multiple output chunks, then the first -// outputted bit calls the readcb, and subsequent chunks just go into -// the read buffer, and will cause it to emit 'readable' if necessary. -// -// This way, back-pressure is actually determined by the reading side, -// since _read has to be called to start processing a new chunk. However, -// a pathological inflate type of transform can cause excessive buffering -// here. For example, imagine a stream where every byte of input is -// interpreted as an integer from 0-255, and then results in that many -// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in -// 1kb of data being output. In this case, you could write a very small -// amount of input, and end up with a very large amount of output. In -// such a pathological inflating mechanism, there'd be no way to tell -// the system to stop doing the transform. A single 4MB write could -// cause the system to run out of memory. -// -// However, even in such a pathological case, only a single written chunk -// would be consumed, and then the rest would wait (un-transformed) until -// the results of the previous transformed chunk were consumed. 
- -'use strict'; - -module.exports = Transform; - -var Duplex = require('./_stream_duplex'); - -/*<replacement>*/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/*</replacement>*/ - -util.inherits(Transform, Duplex); - -function TransformState(stream) { - this.afterTransform = function (er, data) { - return afterTransform(stream, er, data); - }; - - this.needTransform = false; - this.transforming = false; - this.writecb = null; - this.writechunk = null; - this.writeencoding = null; -} - -function afterTransform(stream, er, data) { - var ts = stream._transformState; - ts.transforming = false; - - var cb = ts.writecb; - - if (!cb) return stream.emit('error', new Error('no writecb in Transform class')); - - ts.writechunk = null; - ts.writecb = null; - - if (data !== null && data !== undefined) stream.push(data); - - cb(er); - - var rs = stream._readableState; - rs.reading = false; - if (rs.needReadable || rs.length < rs.highWaterMark) { - stream._read(rs.highWaterMark); - } -} - -function Transform(options) { - if (!(this instanceof Transform)) return new Transform(options); - - Duplex.call(this, options); - - this._transformState = new TransformState(this); - - // when the writable side finishes, then flush out anything remaining. - var stream = this; - - // start out asking for a readable event once data is transformed. - this._readableState.needReadable = true; - - // we have implemented the _read method, and done the other things - // that Readable wants before the first _read call, so unset the - // sync guard flag. - this._readableState.sync = false; - - if (options) { - if (typeof options.transform === 'function') this._transform = options.transform; - - if (typeof options.flush === 'function') this._flush = options.flush; - } - - this.once('prefinish', function () { - if (typeof this._flush === 'function') this._flush(function (er) { - done(stream, er); - });else done(stream); - }); -} - -Transform.prototype.push = function (chunk, encoding) { - this._transformState.needTransform = false; - return Duplex.prototype.push.call(this, chunk, encoding); -}; - -// This is the part where you do stuff! -// override this function in implementation classes. -// 'chunk' is an input chunk. -// -// Call `push(newChunk)` to pass along transformed output -// to the readable side. You may call 'push' zero or more times. -// -// Call `cb(err)` when you are done with this chunk. If you pass -// an error, then that'll put the hurt on the whole operation. If you -// never call cb(), then you'll never get another chunk. -Transform.prototype._transform = function (chunk, encoding, cb) { - throw new Error('Not implemented'); -}; - -Transform.prototype._write = function (chunk, encoding, cb) { - var ts = this._transformState; - ts.writecb = cb; - ts.writechunk = chunk; - ts.writeencoding = encoding; - if (!ts.transforming) { - var rs = this._readableState; - if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); - } -}; - -// Doesn't matter what the args are here. -// _transform does all the work. -// That we got here means that the readable side wants more data. -Transform.prototype._read = function (n) { - var ts = this._transformState; - - if (ts.writechunk !== null && ts.writecb && !ts.transforming) { - ts.transforming = true; - this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); - } else { - // mark that we need a transform, so that any data that comes in - // will get processed, now that we've asked for it. 
- ts.needTransform = true; - } -}; - -function done(stream, er) { - if (er) return stream.emit('error', er); - - // if there's nothing in the write buffer, then that means - // that nothing more will ever be provided - var ws = stream._writableState; - var ts = stream._transformState; - - if (ws.length) throw new Error('Calling transform done when ws.length != 0'); - - if (ts.transforming) throw new Error('Calling transform done when still transforming'); - - return stream.push(null); -}
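The Transform constructor deleted above accepts transform and flush functions directly through options, so subclassing is not required. A minimal sketch of the push()/cb() contract described in the header comment, assuming readable-stream is installed: an uppercasing pass-through.

var Transform = require('readable-stream').Transform;

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    // push() hands transformed output to the readable side;
    // cb() tells the writable side this chunk has been consumed.
    this.push(chunk.toString().toUpperCase());
    cb();
  }
});

process.stdin.pipe(upper).pipe(process.stdout);

Because back-pressure is driven from the reading side (_read triggers the next _transform call), a slow consumer naturally throttles the writer, as the long header comment above explains.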
\ No newline at end of file diff --git a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_writable.js deleted file mode 100644 index ed5efcbd2..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/lib/_stream_writable.js +++ /dev/null @@ -1,526 +0,0 @@ -// A bit simpler than readable streams. -// Implement an async ._write(chunk, encoding, cb), and it'll handle all -// the drain event emission and buffering. - -'use strict'; - -module.exports = Writable; - -/*<replacement>*/ -var processNextTick = require('process-nextick-args'); -/*</replacement>*/ - -/*<replacement>*/ -var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : processNextTick; -/*</replacement>*/ - -Writable.WritableState = WritableState; - -/*<replacement>*/ -var util = require('core-util-is'); -util.inherits = require('inherits'); -/*</replacement>*/ - -/*<replacement>*/ -var internalUtil = { - deprecate: require('util-deprecate') -}; -/*</replacement>*/ - -/*<replacement>*/ -var Stream; -(function () { - try { - Stream = require('st' + 'ream'); - } catch (_) {} finally { - if (!Stream) Stream = require('events').EventEmitter; - } -})(); -/*</replacement>*/ - -var Buffer = require('buffer').Buffer; -/*<replacement>*/ -var bufferShim = require('buffer-shims'); -/*</replacement>*/ - -util.inherits(Writable, Stream); - -function nop() {} - -function WriteReq(chunk, encoding, cb) { - this.chunk = chunk; - this.encoding = encoding; - this.callback = cb; - this.next = null; -} - -var Duplex; -function WritableState(options, stream) { - Duplex = Duplex || require('./_stream_duplex'); - - options = options || {}; - - // object stream flag to indicate whether or not this stream - // contains buffers or objects. - this.objectMode = !!options.objectMode; - - if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; - - // the point at which write() starts returning false - // Note: 0 is a valid value, means that we always return false if - // the entire buffer is not flushed immediately on write() - var hwm = options.highWaterMark; - var defaultHwm = this.objectMode ? 16 : 16 * 1024; - this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm; - - // cast to ints. - this.highWaterMark = ~ ~this.highWaterMark; - - this.needDrain = false; - // at the start of calling end() - this.ending = false; - // when end() has been called, and returned - this.ended = false; - // when 'finish' is emitted - this.finished = false; - - // should we decode strings into buffers before passing to _write? - // this is here so that some node-core streams can optimize string - // handling at a lower level. - var noDecode = options.decodeStrings === false; - this.decodeStrings = !noDecode; - - // Crypto is kind of old and crusty. Historically, its default string - // encoding is 'binary' so we have to make this configurable. - // Everything else in the universe uses 'utf8', though. - this.defaultEncoding = options.defaultEncoding || 'utf8'; - - // not an actual buffer we keep track of, but a measurement - // of how much we're waiting to get pushed to some underlying - // socket or file. - this.length = 0; - - // a flag to see when we're in the middle of a write. 
- this.writing = false; - - // when true all writes will be buffered until .uncork() call - this.corked = 0; - - // a flag to be able to tell if the onwrite cb is called immediately, - // or on a later tick. We set this to true at first, because any - // actions that shouldn't happen until "later" should generally also - // not happen before the first write call. - this.sync = true; - - // a flag to know if we're processing previously buffered items, which - // may call the _write() callback in the same tick, so that we don't - // end up in an overlapped onwrite situation. - this.bufferProcessing = false; - - // the callback that's passed to _write(chunk,cb) - this.onwrite = function (er) { - onwrite(stream, er); - }; - - // the callback that the user supplies to write(chunk,encoding,cb) - this.writecb = null; - - // the amount that is being written when _write is called. - this.writelen = 0; - - this.bufferedRequest = null; - this.lastBufferedRequest = null; - - // number of pending user-supplied write callbacks - // this must be 0 before 'finish' can be emitted - this.pendingcb = 0; - - // emit prefinish if the only thing we're waiting for is _write cbs - // This is relevant for synchronous Transform streams - this.prefinished = false; - - // True if the error was already emitted and should not be thrown again - this.errorEmitted = false; - - // count buffered requests - this.bufferedRequestCount = 0; - - // allocate the first CorkedRequest, there is always - // one allocated and free to use, and we maintain at most two - this.corkedRequestsFree = new CorkedRequest(this); -} - -WritableState.prototype.getBuffer = function writableStateGetBuffer() { - var current = this.bufferedRequest; - var out = []; - while (current) { - out.push(current); - current = current.next; - } - return out; -}; - -(function () { - try { - Object.defineProperty(WritableState.prototype, 'buffer', { - get: internalUtil.deprecate(function () { - return this.getBuffer(); - }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.') - }); - } catch (_) {} -})(); - -var Duplex; -function Writable(options) { - Duplex = Duplex || require('./_stream_duplex'); - - // Writable ctor is applied to Duplexes, though they're not - // instanceof Writable, they're instanceof Readable. - if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); - - this._writableState = new WritableState(options, this); - - // legacy. - this.writable = true; - - if (options) { - if (typeof options.write === 'function') this._write = options.write; - - if (typeof options.writev === 'function') this._writev = options.writev; - } - - Stream.call(this); -} - -// Otherwise people can pipe Writable streams, which is just wrong. -Writable.prototype.pipe = function () { - this.emit('error', new Error('Cannot pipe, not readable')); -}; - -function writeAfterEnd(stream, cb) { - var er = new Error('write after end'); - // TODO: defer error events consistently everywhere, not just the cb - stream.emit('error', er); - processNextTick(cb, er); -} - -// If we get something that is not a buffer, string, null, or undefined, -// and we're not in objectMode, then that's an error. -// Otherwise stream chunks are all considered to be of length=1, and the -// watermarks determine how many objects to keep in the buffer, rather than -// how many bytes or characters. 
-function validChunk(stream, state, chunk, cb) { - var valid = true; - var er = false; - // Always throw error if a null is written - // if we are not in object mode then throw - // if it is not a buffer, string, or undefined. - if (chunk === null) { - er = new TypeError('May not write null values to stream'); - } else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { - er = new TypeError('Invalid non-string/buffer chunk'); - } - if (er) { - stream.emit('error', er); - processNextTick(cb, er); - valid = false; - } - return valid; -} - -Writable.prototype.write = function (chunk, encoding, cb) { - var state = this._writableState; - var ret = false; - - if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; - - if (typeof cb !== 'function') cb = nop; - - if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) { - state.pendingcb++; - ret = writeOrBuffer(this, state, chunk, encoding, cb); - } - - return ret; -}; - -Writable.prototype.cork = function () { - var state = this._writableState; - - state.corked++; -}; - -Writable.prototype.uncork = function () { - var state = this._writableState; - - if (state.corked) { - state.corked--; - - if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); - } -}; - -Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { - // node::ParseEncoding() requires lower case. - if (typeof encoding === 'string') encoding = encoding.toLowerCase(); - if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); - this._writableState.defaultEncoding = encoding; - return this; -}; - -function decodeChunk(state, chunk, encoding) { - if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { - chunk = bufferShim.from(chunk, encoding); - } - return chunk; -} - -// if we're already writing something, then just put this -// in the queue, and wait our turn. Otherwise, call _write -// If we return false, then we need a drain event, so set that flag. -function writeOrBuffer(stream, state, chunk, encoding, cb) { - chunk = decodeChunk(state, chunk, encoding); - - if (Buffer.isBuffer(chunk)) encoding = 'buffer'; - var len = state.objectMode ? 1 : chunk.length; - - state.length += len; - - var ret = state.length < state.highWaterMark; - // we must ensure that previous needDrain will not be reset to false. 
- if (!ret) state.needDrain = true; - - if (state.writing || state.corked) { - var last = state.lastBufferedRequest; - state.lastBufferedRequest = new WriteReq(chunk, encoding, cb); - if (last) { - last.next = state.lastBufferedRequest; - } else { - state.bufferedRequest = state.lastBufferedRequest; - } - state.bufferedRequestCount += 1; - } else { - doWrite(stream, state, false, len, chunk, encoding, cb); - } - - return ret; -} - -function doWrite(stream, state, writev, len, chunk, encoding, cb) { - state.writelen = len; - state.writecb = cb; - state.writing = true; - state.sync = true; - if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); - state.sync = false; -} - -function onwriteError(stream, state, sync, er, cb) { - --state.pendingcb; - if (sync) processNextTick(cb, er);else cb(er); - - stream._writableState.errorEmitted = true; - stream.emit('error', er); -} - -function onwriteStateUpdate(state) { - state.writing = false; - state.writecb = null; - state.length -= state.writelen; - state.writelen = 0; -} - -function onwrite(stream, er) { - var state = stream._writableState; - var sync = state.sync; - var cb = state.writecb; - - onwriteStateUpdate(state); - - if (er) onwriteError(stream, state, sync, er, cb);else { - // Check if we're actually ready to finish, but don't emit yet - var finished = needFinish(state); - - if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { - clearBuffer(stream, state); - } - - if (sync) { - /*<replacement>*/ - asyncWrite(afterWrite, stream, state, finished, cb); - /*</replacement>*/ - } else { - afterWrite(stream, state, finished, cb); - } - } -} - -function afterWrite(stream, state, finished, cb) { - if (!finished) onwriteDrain(stream, state); - state.pendingcb--; - cb(); - finishMaybe(stream, state); -} - -// Must force callback to be called on nextTick, so that we don't -// emit 'drain' before the write() consumer gets the 'false' return -// value, and has a chance to attach a 'drain' listener. -function onwriteDrain(stream, state) { - if (state.length === 0 && state.needDrain) { - state.needDrain = false; - stream.emit('drain'); - } -} - -// if there's something in the buffer waiting, then process it -function clearBuffer(stream, state) { - state.bufferProcessing = true; - var entry = state.bufferedRequest; - - if (stream._writev && entry && entry.next) { - // Fast case, write everything using _writev() - var l = state.bufferedRequestCount; - var buffer = new Array(l); - var holder = state.corkedRequestsFree; - holder.entry = entry; - - var count = 0; - while (entry) { - buffer[count] = entry; - entry = entry.next; - count += 1; - } - - doWrite(stream, state, true, state.length, buffer, '', holder.finish); - - // doWrite is almost always async, defer these to save a bit of time - // as the hot path ends with doWrite - state.pendingcb++; - state.lastBufferedRequest = null; - if (holder.next) { - state.corkedRequestsFree = holder.next; - holder.next = null; - } else { - state.corkedRequestsFree = new CorkedRequest(state); - } - } else { - // Slow case, write chunks one-by-one - while (entry) { - var chunk = entry.chunk; - var encoding = entry.encoding; - var cb = entry.callback; - var len = state.objectMode ? 1 : chunk.length; - - doWrite(stream, state, false, len, chunk, encoding, cb); - entry = entry.next; - // if we didn't call the onwrite immediately, then - // it means that we need to wait until it does. 
- // also, that means that the chunk and cb are currently - // being processed, so move the buffer counter past them. - if (state.writing) { - break; - } - } - - if (entry === null) state.lastBufferedRequest = null; - } - - state.bufferedRequestCount = 0; - state.bufferedRequest = entry; - state.bufferProcessing = false; -} - -Writable.prototype._write = function (chunk, encoding, cb) { - cb(new Error('not implemented')); -}; - -Writable.prototype._writev = null; - -Writable.prototype.end = function (chunk, encoding, cb) { - var state = this._writableState; - - if (typeof chunk === 'function') { - cb = chunk; - chunk = null; - encoding = null; - } else if (typeof encoding === 'function') { - cb = encoding; - encoding = null; - } - - if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); - - // .end() fully uncorks - if (state.corked) { - state.corked = 1; - this.uncork(); - } - - // ignore unnecessary end() calls. - if (!state.ending && !state.finished) endWritable(this, state, cb); -}; - -function needFinish(state) { - return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; -} - -function prefinish(stream, state) { - if (!state.prefinished) { - state.prefinished = true; - stream.emit('prefinish'); - } -} - -function finishMaybe(stream, state) { - var need = needFinish(state); - if (need) { - if (state.pendingcb === 0) { - prefinish(stream, state); - state.finished = true; - stream.emit('finish'); - } else { - prefinish(stream, state); - } - } - return need; -} - -function endWritable(stream, state, cb) { - state.ending = true; - finishMaybe(stream, state); - if (cb) { - if (state.finished) processNextTick(cb);else stream.once('finish', cb); - } - state.ended = true; - stream.writable = false; -} - -// It seems a linked list but it is not -// there will be only 2 of these for each stream -function CorkedRequest(state) { - var _this = this; - - this.next = null; - this.entry = null; - - this.finish = function (err) { - var entry = _this.entry; - _this.entry = null; - while (entry) { - var cb = entry.callback; - state.pendingcb--; - cb(err); - entry = entry.next; - } - if (state.corkedRequestsFree) { - state.corkedRequestsFree.next = _this; - } else { - state.corkedRequestsFree = _this; - } - }; -}
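cork() and uncork() above buffer intermediate writes, and when a stream supplies _writev, clearBuffer() flushes the entire corked batch in a single call. A small sketch of that fast path, assuming readable-stream is installed; the logging is illustrative only:

var Writable = require('readable-stream').Writable;

var sink = new Writable({
  // slow path: one chunk at a time
  write: function (chunk, encoding, cb) { cb(); },
  // fast path: receives the whole corked batch as
  // {chunk, encoding, callback} entries
  writev: function (chunks, cb) {
    console.log('flushing %d chunks in one call', chunks.length);
    cb();
  }
});

sink.cork();
sink.write('a');
sink.write('b');
sink.write('c');
sink.uncork(); // clearBuffer() hands all three buffered requests to writev

Keeping at most two CorkedRequest objects per stream (per the comment above, it looks like a linked list but never grows past two) avoids allocating fresh batch state on the hot path.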
\ No newline at end of file diff --git a/node_modules/merge-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/merge-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js deleted file mode 100644 index e4bfcf02d..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/lib/internal/streams/BufferList.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -var Buffer = require('buffer').Buffer; -/*<replacement>*/ -var bufferShim = require('buffer-shims'); -/*</replacement>*/ - -module.exports = BufferList; - -function BufferList() { - this.head = null; - this.tail = null; - this.length = 0; -} - -BufferList.prototype.push = function (v) { - var entry = { data: v, next: null }; - if (this.length > 0) this.tail.next = entry;else this.head = entry; - this.tail = entry; - ++this.length; -}; - -BufferList.prototype.unshift = function (v) { - var entry = { data: v, next: this.head }; - if (this.length === 0) this.tail = entry; - this.head = entry; - ++this.length; -}; - -BufferList.prototype.shift = function () { - if (this.length === 0) return; - var ret = this.head.data; - if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; - --this.length; - return ret; -}; - -BufferList.prototype.clear = function () { - this.head = this.tail = null; - this.length = 0; -}; - -BufferList.prototype.join = function (s) { - if (this.length === 0) return ''; - var p = this.head; - var ret = '' + p.data; - while (p = p.next) { - ret += s + p.data; - }return ret; -}; - -BufferList.prototype.concat = function (n) { - if (this.length === 0) return bufferShim.alloc(0); - if (this.length === 1) return this.head.data; - var ret = bufferShim.allocUnsafe(n >>> 0); - var p = this.head; - var i = 0; - while (p) { - p.data.copy(ret, i); - i += p.data.length; - p = p.next; - } - return ret; -};
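BufferList above is the internal linked-list chunk store used by the readable state: push()/shift() move whole chunks, and concat(n) copies everything out when n is the combined byte length (as fromList does earlier with state.buffer.concat(state.length)). A usage sketch; the deep require path is an assumption, since the module is internal and not part of the package's public API:

var bufferShim = require('buffer-shims');
var BufferList = require('readable-stream/lib/internal/streams/BufferList');

var list = new BufferList();
list.push(bufferShim.from('abc'));
list.push(bufferShim.from('def'));

console.log(list.length);               // 2 (chunk count, not byte count)
console.log(list.concat(6).toString()); // 'abcdef' (n is the total byte length)
console.log(list.shift().toString());   // 'abc'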
\ No newline at end of file diff --git a/node_modules/merge-stream/node_modules/readable-stream/package.json b/node_modules/merge-stream/node_modules/readable-stream/package.json deleted file mode 100644 index 97784cf8d..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/package.json +++ /dev/null @@ -1,125 +0,0 @@ -{ - "_args": [ - [ - { - "raw": "readable-stream@^2.0.1", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "^2.0.1", - "spec": ">=2.0.1 <3.0.0", - "type": "range" - }, - "/home/dold/repos/taler/wallet-webex/node_modules/merge-stream" - ] - ], - "_from": "readable-stream@>=2.0.1 <3.0.0", - "_id": "readable-stream@2.1.5", - "_inCache": true, - "_location": "/merge-stream/readable-stream", - "_nodeVersion": "5.12.0", - "_npmOperationalInternal": { - "host": "packages-16-east.internal.npmjs.com", - "tmp": "tmp/readable-stream-2.1.5.tgz_1471463532993_0.15824943827465177" - }, - "_npmUser": { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - }, - "_npmVersion": "3.8.6", - "_phantomChildren": {}, - "_requested": { - "raw": "readable-stream@^2.0.1", - "scope": null, - "escapedName": "readable-stream", - "name": "readable-stream", - "rawSpec": "^2.0.1", - "spec": ">=2.0.1 <3.0.0", - "type": "range" - }, - "_requiredBy": [ - "/merge-stream" - ], - "_resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.5.tgz", - "_shasum": "66fa8b720e1438b364681f2ad1a63c618448c9d0", - "_shrinkwrap": null, - "_spec": "readable-stream@^2.0.1", - "_where": "/home/dold/repos/taler/wallet-webex/node_modules/merge-stream", - "browser": { - "util": false - }, - "bugs": { - "url": "https://github.com/nodejs/readable-stream/issues" - }, - "dependencies": { - "buffer-shims": "^1.0.0", - "core-util-is": "~1.0.0", - "inherits": "~2.0.1", - "isarray": "~1.0.0", - "process-nextick-args": "~1.0.6", - "string_decoder": "~0.10.x", - "util-deprecate": "~1.0.1" - }, - "description": "Streams3, a user-land copy of the stream library from Node.js", - "devDependencies": { - "assert": "~1.4.0", - "babel-polyfill": "^6.9.1", - "nyc": "^6.4.0", - "tap": "~0.7.1", - "tape": "~4.5.1", - "zuul": "~3.10.0" - }, - "directories": {}, - "dist": { - "shasum": "66fa8b720e1438b364681f2ad1a63c618448c9d0", - "tarball": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.1.5.tgz" - }, - "gitHead": "758c8b3845af855fde736b6a7f58a15fba00d1e7", - "homepage": "https://github.com/nodejs/readable-stream#readme", - "keywords": [ - "readable", - "stream", - "pipe" - ], - "license": "MIT", - "main": "readable.js", - "maintainers": [ - { - "name": "isaacs", - "email": "isaacs@npmjs.com" - }, - { - "name": "tootallnate", - "email": "nathan@tootallnate.net" - }, - { - "name": "rvagg", - "email": "rod@vagg.org" - }, - { - "name": "cwmma", - "email": "calvin.metcalf@gmail.com" - } - ], - "name": "readable-stream", - "nyc": { - "include": [ - "lib/**.js" - ] - }, - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git://github.com/nodejs/readable-stream.git" - }, - "scripts": { - "browser": "npm run write-zuul && zuul --browser-retries 2 -- test/browser.js", - "cover": "nyc npm test", - "local": "zuul --local 3000 --no-coverage -- test/browser.js", - "report": "nyc report --reporter=lcov", - "test": "tap test/parallel/*.js test/ours/*.js", - "write-zuul": "printf \"ui: tape\nbrowsers:\n - name: $BROWSER_NAME\n version: $BROWSER_VERSION\n\">.zuul.yml" - }, - "version": "2.1.5" -} diff 
--git a/node_modules/merge-stream/node_modules/readable-stream/passthrough.js b/node_modules/merge-stream/node_modules/readable-stream/passthrough.js deleted file mode 100644 index 27e8d8a55..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/passthrough.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_passthrough.js") diff --git a/node_modules/merge-stream/node_modules/readable-stream/readable.js b/node_modules/merge-stream/node_modules/readable-stream/readable.js deleted file mode 100644 index be2688a07..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/readable.js +++ /dev/null @@ -1,16 +0,0 @@ -var Stream = (function (){ - try { - return require('st' + 'ream'); // hack to fix a circular dependency issue when used with browserify - } catch(_){} -}()); -exports = module.exports = require('./lib/_stream_readable.js'); -exports.Stream = Stream || exports; -exports.Readable = exports; -exports.Writable = require('./lib/_stream_writable.js'); -exports.Duplex = require('./lib/_stream_duplex.js'); -exports.Transform = require('./lib/_stream_transform.js'); -exports.PassThrough = require('./lib/_stream_passthrough.js'); - -if (!process.browser && process.env.READABLE_STREAM === 'disable' && Stream) { - module.exports = Stream; -} diff --git a/node_modules/merge-stream/node_modules/readable-stream/transform.js b/node_modules/merge-stream/node_modules/readable-stream/transform.js deleted file mode 100644 index 5d482f078..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/transform.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_transform.js") diff --git a/node_modules/merge-stream/node_modules/readable-stream/writable.js b/node_modules/merge-stream/node_modules/readable-stream/writable.js deleted file mode 100644 index e1e9efdf3..000000000 --- a/node_modules/merge-stream/node_modules/readable-stream/writable.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = require("./lib/_stream_writable.js") diff --git a/node_modules/merge-stream/package.json b/node_modules/merge-stream/package.json index 5ca9340ed..47b6a86fe 100644 --- a/node_modules/merge-stream/package.json +++ b/node_modules/merge-stream/package.json @@ -1,99 +1,22 @@ { - "_args": [ - [ - { - "raw": "merge-stream@^1.0.0", - "scope": null, - "escapedName": "merge-stream", - "name": "merge-stream", - "rawSpec": "^1.0.0", - "spec": ">=1.0.0 <2.0.0", - "type": "range" - }, - "/home/dold/repos/taler/wallet-webex/node_modules/vinyl-fs" - ] - ], - "_from": "merge-stream@>=1.0.0 <2.0.0", - "_id": "merge-stream@1.0.0", - "_inCache": true, - "_location": "/merge-stream", - "_nodeVersion": "2.4.0", - "_npmUser": { - "name": "shinnn", - "email": "snnskwtnb@gmail.com" - }, - "_npmVersion": "2.13.1", - "_phantomChildren": { - "buffer-shims": "1.0.0", - "core-util-is": "1.0.2", - "inherits": "2.0.3", - "process-nextick-args": "1.0.7", - "string_decoder": "0.10.31", - "util-deprecate": "1.0.2" - }, - "_requested": { - "raw": "merge-stream@^1.0.0", - "scope": null, - "escapedName": "merge-stream", - "name": "merge-stream", - "rawSpec": "^1.0.0", - "spec": ">=1.0.0 <2.0.0", - "type": "range" - }, - "_requiredBy": [ - "/vinyl-fs" + "name": "merge-stream", + "version": "1.0.0", + "description": "Create a stream that emits events from multiple other streams", + "main": "index.js", + "files": [ + "index.js" ], - "_resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-1.0.0.tgz", - "_shasum": "9cfd156fef35421e2b5403ce11dc6eb1962b026e", - 
"_shrinkwrap": null, - "_spec": "merge-stream@^1.0.0", - "_where": "/home/dold/repos/taler/wallet-webex/node_modules/vinyl-fs", - "author": { - "name": "Stephen Sugden", - "email": "me@stephensugden.com" - }, - "bugs": { - "url": "https://github.com/grncdr/merge-stream/issues" + "scripts": { + "test": "istanbul cover test.js && istanbul check-cover --statements 100 --branches 100" }, + "repository": "grncdr/merge-stream", + "author": "Stephen Sugden <me@stephensugden.com>", + "license": "MIT", "dependencies": { "readable-stream": "^2.0.1" }, - "description": "Create a stream that emits events from multiple other streams", "devDependencies": { "from2": "^2.0.3", "istanbul": "^0.3.2" - }, - "directories": {}, - "dist": { - "shasum": "9cfd156fef35421e2b5403ce11dc6eb1962b026e", - "tarball": "https://registry.npmjs.org/merge-stream/-/merge-stream-1.0.0.tgz" - }, - "files": [ - "index.js" - ], - "gitHead": "e973cf43ef0edda5d4e3b08b07040d4039822734", - "homepage": "https://github.com/grncdr/merge-stream#readme", - "license": "MIT", - "main": "index.js", - "maintainers": [ - { - "name": "grncdr", - "email": "glurgle@gmail.com" - }, - { - "name": "shinnn", - "email": "snnskwtnb@gmail.com" - } - ], - "name": "merge-stream", - "optionalDependencies": {}, - "readme": "ERROR: No README data found!", - "repository": { - "type": "git", - "url": "git+https://github.com/grncdr/merge-stream.git" - }, - "scripts": { - "test": "istanbul cover test.js && istanbul check-cover --statements 100 --branches 100" - }, - "version": "1.0.0" + } } |