'use strict';

import EventEmitter from 'events';
import {inherits, debuglog} from 'util';
import BufferList from './buffer-list';
import {StringDecoder} from 'string_decoder';
import {Duplex} from './duplex';
import {nextTick} from 'process';
// Buffer is not a global in every environment this file may be bundled for,
// so pull it in explicitly; it is used by push(), chunkInvalid(),
// copyFromBuffer() and the other buffer helpers below.
import {Buffer} from 'buffer';

Readable.ReadableState = ReadableState;

var debug = debuglog('stream');
inherits(Readable, EventEmitter);
function prependListener(emitter, event, fn) {
  // Sadly this is not cacheable as some libraries bundle their own
  // event emitter implementation with them.
  if (typeof emitter.prependListener === 'function') {
    return emitter.prependListener(event, fn);
  } else {
    // This is a hack to make sure that our error handler is attached before any
    // userland ones. NEVER DO THIS. This is here only because this code needs
    // to continue to work with older versions of Node.js that do not include
    // the prependListener() method. The goal is to eventually remove this hack.
    if (!emitter._events || !emitter._events[event])
      emitter.on(event, fn);
    else if (Array.isArray(emitter._events[event]))
      emitter._events[event].unshift(fn);
    else
      emitter._events[event] = [fn, emitter._events[event]];
  }
}

function listenerCount(emitter, type) {
  return emitter.listeners(type).length;
}
function ReadableState(options, stream) {
  options = options || {};

  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;

  if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;

  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;

  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;

  // A linked list is used to store data chunks instead of an array because the
  // linked list can remove elements from the beginning faster than
  // array.shift()
  this.buffer = new BufferList();
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick. We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;

  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;
  this.resumeScheduled = false;

  // Crypto is kind of old and crusty. Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;

  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;

  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;

  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}
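
// Illustrative note (comment only, not part of the original source): given
// the defaults above, and assuming no highWaterMark option is passed,
//
//   new Readable()._readableState.highWaterMark                      // 16384
//   new Readable({objectMode: true})._readableState.highWaterMark    // 16
//
// and an explicit highWaterMark of 0 is kept as-is rather than replaced by
// the default, which means _read() is never called preemptively.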
export default Readable;
export function Readable(options) {
  if (!(this instanceof Readable)) return new Readable(options);

  this._readableState = new ReadableState(options, this);

  // legacy
  this.readable = true;

  if (options && typeof options.read === 'function') this._read = options.read;

  EventEmitter.call(this);
}
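
// Illustrative usage (comment only, not part of the module): instead of
// subclassing, a Readable can be constructed with a `read` option, which is
// installed as _read() above. The counter below is a made-up example.
//
//   var i = 0;
//   var counter = new Readable({
//     read: function () {
//       // push chunks until done, then push(null) to signal EOF
//       this.push(i < 3 ? Buffer.from(String(i++)) : null);
//     }
//   });
//   counter.on('data', function (chunk) { console.log(chunk.toString()); });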

// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
Readable.prototype.push = function (chunk, encoding) {
  var state = this._readableState;

  if (!state.objectMode && typeof chunk === 'string') {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = Buffer.from(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk(this, state, chunk, encoding, false);
};
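
// Illustrative producer pattern (comment only, not part of the module): a
// falsy return from push() means the buffer has reached highWaterMark, so a
// well-behaved source stops producing until _read() is called again.
// `MySource` and `_generate()` are hypothetical names used only here.
//
//   MySource.prototype._read = function (n) {
//     var chunk;
//     while ((chunk = this._generate()) !== null) {
//       if (!this.push(chunk)) break; // back off until the next _read()
//     }
//   };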

// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function (chunk) {
  var state = this._readableState;
  return readableAddChunk(this, state, chunk, '', true);
};

Readable.prototype.isPaused = function () {
  return this._readableState.flowing === false;
};

function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (chunk === null) {
    state.reading = false;
    onEofChunk(stream, state);
  } else if (state.objectMode || (chunk && chunk.length > 0)) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var _e = new Error('stream.unshift() after end event');
      stream.emit('error', _e);
    } else {
      var skipAdd;
      if (state.decoder && !addToFront && !encoding) {
        chunk = state.decoder.write(chunk);
        skipAdd = !state.objectMode && chunk.length === 0;
      }

      if (!addToFront) state.reading = false;

      // Don't add to the buffer if we've decoded to an empty string chunk and
      // we're not in object mode
      if (!skipAdd) {
        // if we want the data now, just emit it.
        if (state.flowing && state.length === 0 && !state.sync) {
          stream.emit('data', chunk);
          stream.read(0);
        } else {
          // update the buffer info.
          state.length += state.objectMode ? 1 : chunk.length;
          if (addToFront) state.buffer.unshift(chunk);
          else state.buffer.push(chunk);

          if (state.needReadable) emitReadable(stream);
        }
      }

      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    state.reading = false;
  }

  return needMoreData(state);
}

// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes. This is to work around cases where hwm=0,
// such as the repl. Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
  return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
}

// backwards compatibility.
Readable.prototype.setEncoding = function (enc) {
  this._readableState.decoder = new StringDecoder(enc);
  this._readableState.encoding = enc;
  return this;
};
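
// Illustrative usage (comment only, not part of the module): after
// setEncoding('utf8'), read() and 'data' deliver strings decoded by
// StringDecoder instead of raw Buffers, with multi-byte characters kept
// intact across chunk boundaries.
//
//   readable.setEncoding('utf8');
//   readable.on('data', function (str) { /* str is a string */ });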

// Don't raise the hwm > 8MB
var MAX_HWM = 0x800000;
function computeNewHighWaterMark(n) {
  if (n >= MAX_HWM) {
    n = MAX_HWM;
  } else {
    // Get the next highest power of 2 to prevent increasing hwm excessively in
    // tiny amounts
    n--;
    n |= n >>> 1;
    n |= n >>> 2;
    n |= n >>> 4;
    n |= n >>> 8;
    n |= n >>> 16;
    n++;
  }
  return n;
}
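
// Worked example (comment only, not part of the module): the bit-twiddling
// above rounds n up to the next power of two, so
//
//   computeNewHighWaterMark(100)       // 128
//   computeNewHighWaterMark(100000)    // 131072 (2^17)
//   computeNewHighWaterMark(0x900000)  // 0x800000 (clamped to MAX_HWM)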

// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function howMuchToRead(n, state) {
  if (n <= 0 || (state.length === 0 && state.ended)) return 0;
  if (state.objectMode) return 1;
  // n !== n is a NaN check: read() was called with no argument (or a
  // non-numeric one), so return whatever counts as "everything available".
  if (n !== n) {
    // Only flow one buffer at a time
    if (state.flowing && state.length) return state.buffer.head.data.length;
    return state.length;
  }
  // If we're asking for more than the current hwm, then raise the hwm.
  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
  if (n <= state.length) return n;
  // Don't have enough
  if (!state.ended) {
    state.needReadable = true;
    return 0;
  }
  return state.length;
}

// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function (n) {
  debug('read', n);
  n = parseInt(n, 10);
  var state = this._readableState;
  var nOrig = n;

  if (n !== 0) state.emittedReadable = false;

  // if we're doing read(0) to trigger a readable event, but we
  // already have a bunch of data in the buffer, then just trigger
  // the 'readable' event and move on.
  if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
    debug('read: emitReadable', state.length, state.ended);
    if (state.length === 0 && state.ended) endReadable(this);
    else emitReadable(this);
    return null;
  }

  n = howMuchToRead(n, state);

  // if we've ended, and we're now clear, then finish it up.
  if (n === 0 && state.ended) {
    if (state.length === 0) endReadable(this);
    return null;
  }

  // All the actual chunk generation logic needs to be
  // *below* the call to _read. The reason is that in certain
  // synthetic stream cases, such as passthrough streams, _read
  // may be a completely synchronous operation which may change
  // the state of the read buffer, providing enough data when
  // before there was *not* enough.
  //
  // So, the steps are:
  // 1. Figure out what the state of things will be after we do
  // a read from the buffer.
  //
  // 2. If that resulting state will trigger a _read, then call _read.
  // Note that this may be asynchronous, or synchronous. Yes, it is
  // deeply ugly to write APIs this way, but that still doesn't mean
  // that the Readable class should behave improperly, as streams are
  // designed to be sync/async agnostic.
  // Take note if the _read call is sync or async (ie, if the read call
  // has returned yet), so that we know whether or not it's safe to emit
  // 'readable' etc.
  //
  // 3. Actually pull the requested chunks out of the buffer and return.

  // if we need a readable event, then we need to do some reading.
  var doRead = state.needReadable;
  debug('need readable', doRead);

  // if we currently have less than the highWaterMark, then also read some
  if (state.length === 0 || state.length - n < state.highWaterMark) {
    doRead = true;
    debug('length less than watermark', doRead);
  }

  // however, if we've ended, then there's no point, and if we're already
  // reading, then it's unnecessary.
  if (state.ended || state.reading) {
    doRead = false;
    debug('reading or ended', doRead);
  } else if (doRead) {
    debug('do read');
    state.reading = true;
    state.sync = true;
    // if the length is currently zero, then we *need* a readable event.
    if (state.length === 0) state.needReadable = true;
    // call internal read method
    this._read(state.highWaterMark);
    state.sync = false;
    // If _read pushed data synchronously, then `reading` will be false,
    // and we need to re-evaluate how much data we can return to the user.
    if (!state.reading) n = howMuchToRead(nOrig, state);
  }

  var ret;
  if (n > 0) ret = fromList(n, state);
  else ret = null;

  if (ret === null) {
    state.needReadable = true;
    n = 0;
  } else {
    state.length -= n;
  }

  if (state.length === 0) {
    // If we have nothing in the buffer, then we want to know
    // as soon as we *do* get something into the buffer.
    if (!state.ended) state.needReadable = true;

    // If we tried to read() past the EOF, then emit end on the next tick.
    if (nOrig !== n && state.ended) endReadable(this);
  }

  if (ret !== null) this.emit('data', ret);

  return ret;
};
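
// Illustrative consumer pattern (comment only, not part of the module): the
// usual paused-mode loop drains the buffer whenever 'readable' fires, and
// read() returns null once the buffer is empty.
//
//   readable.on('readable', function () {
//     var chunk;
//     while ((chunk = readable.read()) !== null) {
//       // process chunk
//     }
//   });
//   readable.on('end', function () { /* no more data */ });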

function chunkInvalid(state, chunk) {
  var er = null;
  if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
    er = new TypeError('Invalid non-string/buffer chunk');
  }
  return er;
}

function onEofChunk(stream, state) {
  if (state.ended) return;
  if (state.decoder) {
    var chunk = state.decoder.end();
    if (chunk && chunk.length) {
      state.buffer.push(chunk);
      state.length += state.objectMode ? 1 : chunk.length;
    }
  }
  state.ended = true;

  // emit 'readable' now to make sure it gets picked up.
  emitReadable(stream);
}

// Don't emit readable right away in sync mode, because this can trigger
// another read() call => stack overflow. This way, it might trigger
// a nextTick recursion warning, but that's not so bad.
function emitReadable(stream) {
  var state = stream._readableState;
  state.needReadable = false;
  if (!state.emittedReadable) {
    debug('emitReadable', state.flowing);
    state.emittedReadable = true;
    if (state.sync) nextTick(emitReadable_, stream);
    else emitReadable_(stream);
  }
}

function emitReadable_(stream) {
  debug('emit readable');
  stream.emit('readable');
  flow(stream);
}

// at this point, the user has presumably seen the 'readable' event,
// and called read() to consume some data. that may have triggered
// in turn another _read(n) call, in which case reading = true if
// it's in progress.
// However, if we're not ended, or reading, and the length < hwm,
// then go ahead and try to read some more preemptively.
function maybeReadMore(stream, state) {
  if (!state.readingMore) {
    state.readingMore = true;
    nextTick(maybeReadMore_, stream, state);
  }
}

function maybeReadMore_(stream, state) {
  var len = state.length;
  while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
    debug('maybeReadMore read 0');
    stream.read(0);
    if (len === state.length) {
      // didn't get any data, stop spinning.
      break;
    } else {
      len = state.length;
    }
  }
  state.readingMore = false;
}

// abstract method. to be overridden in specific implementation classes.
// call cb(er, data) where data is <= n in length.
// for virtual (non-string, non-buffer) streams, "length" is somewhat
// arbitrary, and perhaps not very meaningful.
Readable.prototype._read = function (n) {
  this.emit('error', new Error('not implemented'));
};

Readable.prototype.pipe = function (dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);

  var doEnd = !pipeOpts || pipeOpts.end !== false;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted) nextTick(endFn);
  else src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    debug('onunpipe');
    if (readable === src) {
      cleanup();
    }
  }

  function onend() {
    debug('onend');
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source. This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  var cleanedUp = false;
  function cleanup() {
    debug('cleanup');
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    src.removeListener('data', ondata);

    cleanedUp = true;

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
  }

  // If the user pushes more data while we're writing to dest then we'll end up
  // in ondata again. However, we only want to increase awaitDrain once because
  // dest will only emit one 'drain' event for the multiple writes.
  // => Introduce a guard on increasing awaitDrain.
  var increasedAwaitDrain = false;
  src.on('data', ondata);
  function ondata(chunk) {
    debug('ondata');
    increasedAwaitDrain = false;
    var ret = dest.write(chunk);
    if (false === ret && !increasedAwaitDrain) {
      // If the user unpiped during `dest.write()`, it is possible
      // to get stuck in a permanently paused state if that write
      // also returned false.
      // => Check whether `dest` is still a piping destination.
      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
        debug('false write response, pause', src._readableState.awaitDrain);
        src._readableState.awaitDrain++;
        increasedAwaitDrain = true;
      }
      src.pause();
    }
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (listenerCount(dest, 'error') === 0) dest.emit('error', er);
  }

  // Make sure our error handler is attached before userland ones.
  prependListener(dest, 'error', onerror);

  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }

  return dest;
};
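
// Illustrative usage (comment only, not part of the module): pipe() with the
// default options ends the destination when the source ends; passing
// {end: false} keeps the destination open so it can be finished by hand.
//
//   src.pipe(dest, {end: false});
//   src.on('end', function () {
//     dest.end('trailer written manually');
//   });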

function pipeOnDrain(src) {
  return function () {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain) state.awaitDrain--;
    if (state.awaitDrain === 0 && src.listeners('data').length) {
      state.flowing = true;
      flow(src);
    }
  };
}

Readable.prototype.unpipe = function (dest) {
  var state = this._readableState;

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0) return this;

  // just one destination. most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes) return this;

    if (!dest) dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest) dest.emit('unpipe', this);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;

    for (var _i = 0; _i < len; _i++) {
      dests[_i].emit('unpipe', this);
    }
    return this;
  }

  // try to find the right one.
  var i = indexOf(state.pipes, dest);
  if (i === -1) return this;

  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  if (state.pipesCount === 1) state.pipes = state.pipes[0];

  dest.emit('unpipe', this);

  return this;
};
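
// Illustrative usage (comment only, not part of the module):
//
//   src.unpipe(dest); // detach one destination
//   src.unpipe();     // detach every destination and stop flowing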

// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function (ev, fn) {
  var res = EventEmitter.prototype.on.call(this, ev, fn);

  if (ev === 'data') {
    // Start flowing on next tick if stream isn't explicitly paused
    if (this._readableState.flowing !== false) this.resume();
  } else if (ev === 'readable') {
    var state = this._readableState;
    if (!state.endEmitted && !state.readableListening) {
      state.readableListening = state.needReadable = true;
      state.emittedReadable = false;
      if (!state.reading) {
        nextTick(nReadingNextTick, this);
      } else if (state.length) {
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;

function nReadingNextTick(self) {
  debug('readable nexttick read 0');
  self.read(0);
}

// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
Readable.prototype.resume = function () {
  var state = this._readableState;
  if (!state.flowing) {
    debug('resume');
    state.flowing = true;
    resume(this, state);
  }
  return this;
};

function resume(stream, state) {
  if (!state.resumeScheduled) {
    state.resumeScheduled = true;
    nextTick(resume_, stream, state);
  }
}

function resume_(stream, state) {
  if (!state.reading) {
    debug('resume read 0');
    stream.read(0);
  }

  state.resumeScheduled = false;
  state.awaitDrain = 0;
  stream.emit('resume');
  flow(stream);
  if (state.flowing && !state.reading) stream.read(0);
}

Readable.prototype.pause = function () {
  debug('call pause flowing=%j', this._readableState.flowing);
  if (false !== this._readableState.flowing) {
    debug('pause');
    this._readableState.flowing = false;
    this.emit('pause');
  }
  return this;
};

function flow(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  while (state.flowing && stream.read() !== null) {}
}
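
// Illustrative flow-control pattern (comment only, not part of the module):
// pause() stops 'data' events from being emitted, and resume() restarts them,
// which is the usual way to throttle a flowing-mode consumer.
//
//   readable.on('data', function (chunk) {
//     readable.pause();
//     doSomethingSlow(chunk, function () {
//       readable.resume();
//     });
//   });
//
// `doSomethingSlow` is a hypothetical async operation used only here.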

// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function (stream) {
  var state = this._readableState;
  var paused = false;

  var self = this;
  stream.on('end', function () {
    debug('wrapped end');
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length) self.push(chunk);
    }

    self.push(null);
  });

  stream.on('data', function (chunk) {
    debug('wrapped data');
    if (state.decoder) chunk = state.decoder.write(chunk);

    // don't skip over falsy values in objectMode
    if (state.objectMode && (chunk === null || chunk === undefined)) return;
    else if (!state.objectMode && (!chunk || !chunk.length)) return;

    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (this[i] === undefined && typeof stream[i] === 'function') {
      this[i] = function (method) {
        return function () {
          return stream[method].apply(stream, arguments);
        };
      }(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function (ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function (n) {
    debug('wrapped _read', n);
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};
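
// Illustrative usage (comment only, not part of the module): wrap() adapts an
// old-style stream that only emits 'data'/'end' into this Readable, so the
// newer paused-mode API can be used on top of it.
//
//   var modern = new Readable().wrap(oldStyleStream);
//   modern.on('readable', function () {
//     var chunk = modern.read();
//   });
//
// `oldStyleStream` stands for any legacy stream instance.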

// exposed for testing purposes only.
Readable._fromList = fromList;

// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromList(n, state) {
  // nothing buffered
  if (state.length === 0) return null;

  var ret;
  if (state.objectMode) {
    ret = state.buffer.shift();
  } else if (!n || n >= state.length) {
    // read it all, truncate the list
    if (state.decoder) ret = state.buffer.join('');
    else if (state.buffer.length === 1) ret = state.buffer.head.data;
    else ret = state.buffer.concat(state.length);
    state.buffer.clear();
  } else {
    // read part of list
    ret = fromListPartial(n, state.buffer, state.decoder);
  }

  return ret;
}

// Extracts only enough buffered data to satisfy the amount requested.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function fromListPartial(n, list, hasStrings) {
  var ret;
  if (n < list.head.data.length) {
    // slice is the same for buffers and strings
    ret = list.head.data.slice(0, n);
    list.head.data = list.head.data.slice(n);
  } else if (n === list.head.data.length) {
    // first chunk is a perfect match
    ret = list.shift();
  } else {
    // result spans more than one buffer
    ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
  }
  return ret;
}

// Copies a specified amount of characters from the list of buffered data
// chunks.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function copyFromBufferString(n, list) {
  var p = list.head;
  var c = 1;
  var ret = p.data;
  n -= ret.length;
  while (p = p.next) {
    var str = p.data;
    var nb = n > str.length ? str.length : n;
    if (nb === str.length) ret += str;
    else ret += str.slice(0, n);
    n -= nb;
    if (n === 0) {
      if (nb === str.length) {
        ++c;
        if (p.next) list.head = p.next;
        else list.head = list.tail = null;
      } else {
        list.head = p;
        p.data = str.slice(nb);
      }
      break;
    }
    ++c;
  }
  list.length -= c;
  return ret;
}

// Copies a specified amount of bytes from the list of buffered data chunks.
// This function is designed to be inlinable, so please take care when making
// changes to the function body.
function copyFromBuffer(n, list) {
  var ret = Buffer.allocUnsafe(n);
  var p = list.head;
  var c = 1;
  p.data.copy(ret);
  n -= p.data.length;
  while (p = p.next) {
    var buf = p.data;
    var nb = n > buf.length ? buf.length : n;
    buf.copy(ret, ret.length - n, 0, nb);
    n -= nb;
    if (n === 0) {
      if (nb === buf.length) {
        ++c;
        if (p.next) list.head = p.next;
        else list.head = list.tail = null;
      } else {
        list.head = p;
        p.data = buf.slice(nb);
      }
      break;
    }
    ++c;
  }
  list.length -= c;
  return ret;
}

function endReadable(stream) {
  var state = stream._readableState;

  // If we get here before consuming all the bytes, then that is a
  // bug in node. Should never happen.
  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');

  if (!state.endEmitted) {
    state.ended = true;
    nextTick(endReadableNT, state, stream);
  }
}

function endReadableNT(state, stream) {
  // Check that we didn't get one last unshift.
  if (!state.endEmitted && state.length === 0) {
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');
  }
}

function forEach(xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}

function indexOf(xs, x) {
  for (var i = 0, l = xs.length; i < l; i++) {
    if (xs[i] === x) return i;
  }
  return -1;
}