// Retrieved via unpkg.com ("View Raw" page header: UNPKG, 64.9 kB, TypeScript).
/**
 * A stream is an abstract interface for working with streaming data in Node.js.
 * The `stream` module provides an API for implementing the stream interface.
 *
 * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances.
 *
 * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`.
 *
 * To access the `stream` module:
 *
 * ```js
 * const stream = require('stream');
 * ```
 *
 * The `stream` module is useful for creating new types of stream instances. It is
 * usually not necessary to use the `stream` module to consume streams.
 * @see [source](https://github.com/nodejs/node/blob/v17.0.0/lib/stream.js)
 */
declare module 'stream' {
    import { EventEmitter, Abortable } from 'node:events';
    import * as streamPromises from 'node:stream/promises';
    import * as streamConsumers from 'node:stream/consumers';
    import * as streamWeb from 'node:stream/web';
24 class internal extends EventEmitter {
25 pipe<T extends NodeJS.WritableStream>(
26 destination: T,
27 options?: {
28 end?: boolean | undefined;
29 }
30 ): T;
31 }
namespace internal {
33 class Stream extends internal {
34 constructor(opts?: ReadableOptions);
35 }
36 interface StreamOptions<T extends Stream> extends Abortable {
37 emitClose?: boolean | undefined;
38 highWaterMark?: number | undefined;
39 objectMode?: boolean | undefined;
40 construct?(this: T, callback: (error?: Error | null) => void): void;
41 destroy?(this: T, error: Error | null, callback: (error: Error | null) => void): void;
42 autoDestroy?: boolean | undefined;
43 }
44 interface ReadableOptions extends StreamOptions<Readable> {
45 encoding?: BufferEncoding | undefined;
46 read?(this: Readable, size: number): void;
47 }
48 /**
49 * @since v0.9.4
50 */
51 class Readable extends Stream implements NodeJS.ReadableStream {
52 /**
53 * A utility method for creating Readable Streams out of iterators.
54 */
55 static from(iterable: Iterable<any> | AsyncIterable<any>, options?: ReadableOptions): Readable;
56 /**
57 * A utility method for creating a `Readable` from a web `ReadableStream`.
58 * @since v17.0.0
59 * @experimental
60 */
61 static fromWeb(readableStream: streamWeb.ReadableStream, options?: Pick<ReadableOptions, 'encoding' | 'highWaterMark' | 'objectMode' | 'signal'>): Readable;
62 /**
63 * Returns whether the stream has been read from or cancelled.
64 * @since v16.8.0
65 */
66 static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean;
67 /**
68 * A utility method for creating a web `ReadableStream` from a `Readable`.
69 * @since v17.0.0
70 * @experimental
71 */
72 static toWeb(streamReadable: Readable): streamWeb.ReadableStream;
73 /**
74 * Returns whether the stream was destroyed or errored before emitting `'end'`.
75 * @since v16.8.0
76 * @experimental
77 */
78 readonly readableAborted: boolean;
79 /**
80 * Is `true` if it is safe to call `readable.read()`, which means
81 * the stream has not been destroyed or emitted `'error'` or `'end'`.
82 * @since v11.4.0
83 */
84 readable: boolean;
85 /**
86 * Returns whether `'data'` has been emitted.
87 * @since v16.7.0, v14.18.0
88 * @experimental
89 */
90 readonly readableDidRead: boolean;
91 /**
92 * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method.
93 * @since v12.7.0
94 */
95 readonly readableEncoding: BufferEncoding | null;
96 /**
97 * Becomes `true` when `'end'` event is emitted.
98 * @since v12.9.0
99 */
100 readonly readableEnded: boolean;
101 /**
102 * This property reflects the current state of a `Readable` stream as described
103 * in the `Three states` section.
104 * @since v9.4.0
105 */
106 readonly readableFlowing: boolean | null;
107 /**
108 * Returns the value of `highWaterMark` passed when creating this `Readable`.
109 * @since v9.3.0
110 */
111 readonly readableHighWaterMark: number;
112 /**
113 * This property contains the number of bytes (or objects) in the queue
114 * ready to be read. The value provides introspection data regarding
115 * the status of the `highWaterMark`.
116 * @since v9.4.0
117 */
118 readonly readableLength: number;
119 /**
120 * Getter for the property `objectMode` of a given `Readable` stream.
121 * @since v12.3.0
122 */
123 readonly readableObjectMode: boolean;
124 /**
125 * Is `true` after `readable.destroy()` has been called.
126 * @since v8.0.0
127 */
128 destroyed: boolean;
129 constructor(opts?: ReadableOptions);
130 _construct?(callback: (error?: Error | null) => void): void;
131 _read(size: number): void;
132 /**
133 * The `readable.read()` method pulls some data out of the internal buffer and
134 * returns it. If no data available to be read, `null` is returned. By default,
135 * the data will be returned as a `Buffer` object unless an encoding has been
136 * specified using the `readable.setEncoding()` method or the stream is operating
137 * in object mode.
138 *
139 * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which
140 * case all of the data remaining in the internal
141 * buffer will be returned.
142 *
143 * If the `size` argument is not specified, all of the data contained in the
144 * internal buffer will be returned.
145 *
146 * The `size` argument must be less than or equal to 1 GiB.
147 *
148 * The `readable.read()` method should only be called on `Readable` streams
149 * operating in paused mode. In flowing mode, `readable.read()` is called
150 * automatically until the internal buffer is fully drained.
151 *
152 * ```js
153 * const readable = getReadableStreamSomehow();
154 *
155 * // 'readable' may be triggered multiple times as data is buffered in
156 * readable.on('readable', () => {
157 * let chunk;
158 * console.log('Stream is readable (new data received in buffer)');
159 * // Use a loop to make sure we read all currently available data
160 * while (null !== (chunk = readable.read())) {
161 * console.log(`Read ${chunk.length} bytes of data...`);
162 * }
163 * });
164 *
165 * // 'end' will be triggered once when there is no more data available
166 * readable.on('end', () => {
167 * console.log('Reached end of stream.');
168 * });
169 * ```
170 *
171 * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks
172 * are not concatenated. A `while` loop is necessary to consume all data
173 * currently in the buffer. When reading a large file `.read()` may return `null`,
174 * having consumed all buffered content so far, but there is still more data to
175 * come not yet buffered. In this case a new `'readable'` event will be emitted
176 * when there is more data in the buffer. Finally the `'end'` event will be
177 * emitted when there is no more data to come.
178 *
179 * Therefore to read a file's whole contents from a `readable`, it is necessary
180 * to collect chunks across multiple `'readable'` events:
181 *
182 * ```js
183 * const chunks = [];
184 *
185 * readable.on('readable', () => {
186 * let chunk;
187 * while (null !== (chunk = readable.read())) {
188 * chunks.push(chunk);
189 * }
190 * });
191 *
192 * readable.on('end', () => {
193 * const content = chunks.join('');
194 * });
195 * ```
196 *
197 * A `Readable` stream in object mode will always return a single item from
198 * a call to `readable.read(size)`, regardless of the value of the`size` argument.
199 *
200 * If the `readable.read()` method returns a chunk of data, a `'data'` event will
201 * also be emitted.
202 *
203 * Calling {@link read} after the `'end'` event has
204 * been emitted will return `null`. No runtime error will be raised.
205 * @since v0.9.4
206 * @param size Optional argument to specify how much data to read.
207 */
208 read(size?: number): any;
209 /**
210 * The `readable.setEncoding()` method sets the character encoding for
211 * data read from the `Readable` stream.
212 *
213 * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data
214 * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the
215 * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal
216 * string format.
217 *
218 * The `Readable` stream will properly handle multi-byte characters delivered
219 * through the stream that would otherwise become improperly decoded if simply
220 * pulled from the stream as `Buffer` objects.
221 *
222 * ```js
223 * const readable = getReadableStreamSomehow();
224 * readable.setEncoding('utf8');
225 * readable.on('data', (chunk) => {
226 * assert.equal(typeof chunk, 'string');
227 * console.log('Got %d characters of string data:', chunk.length);
228 * });
229 * ```
230 * @since v0.9.4
231 * @param encoding The encoding to use.
232 */
233 setEncoding(encoding: BufferEncoding): this;
234 /**
235 * The `readable.pause()` method will cause a stream in flowing mode to stop
236 * emitting `'data'` events, switching out of flowing mode. Any data that
237 * becomes available will remain in the internal buffer.
238 *
239 * ```js
240 * const readable = getReadableStreamSomehow();
241 * readable.on('data', (chunk) => {
242 * console.log(`Received ${chunk.length} bytes of data.`);
243 * readable.pause();
244 * console.log('There will be no additional data for 1 second.');
245 * setTimeout(() => {
246 * console.log('Now data will start flowing again.');
247 * readable.resume();
248 * }, 1000);
249 * });
250 * ```
251 *
252 * The `readable.pause()` method has no effect if there is a `'readable'`event listener.
253 * @since v0.9.4
254 */
255 pause(): this;
256 /**
257 * The `readable.resume()` method causes an explicitly paused `Readable` stream to
258 * resume emitting `'data'` events, switching the stream into flowing mode.
259 *
260 * The `readable.resume()` method can be used to fully consume the data from a
261 * stream without actually processing any of that data:
262 *
263 * ```js
264 * getReadableStreamSomehow()
265 * .resume()
266 * .on('end', () => {
267 * console.log('Reached the end, but did not read anything.');
268 * });
269 * ```
270 *
271 * The `readable.resume()` method has no effect if there is a `'readable'`event listener.
272 * @since v0.9.4
273 */
274 resume(): this;
275 /**
276 * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most
277 * typical cases, there will be no reason to
278 * use this method directly.
279 *
280 * ```js
281 * const readable = new stream.Readable();
282 *
283 * readable.isPaused(); // === false
284 * readable.pause();
285 * readable.isPaused(); // === true
286 * readable.resume();
287 * readable.isPaused(); // === false
288 * ```
289 * @since v0.11.14
290 */
291 isPaused(): boolean;
292 /**
293 * The `readable.unpipe()` method detaches a `Writable` stream previously attached
294 * using the {@link pipe} method.
295 *
296 * If the `destination` is not specified, then _all_ pipes are detached.
297 *
298 * If the `destination` is specified, but no pipe is set up for it, then
299 * the method does nothing.
300 *
301 * ```js
302 * const fs = require('fs');
303 * const readable = getReadableStreamSomehow();
304 * const writable = fs.createWriteStream('file.txt');
305 * // All the data from readable goes into 'file.txt',
306 * // but only for the first second.
307 * readable.pipe(writable);
308 * setTimeout(() => {
309 * console.log('Stop writing to file.txt.');
310 * readable.unpipe(writable);
311 * console.log('Manually close the file stream.');
312 * writable.end();
313 * }, 1000);
314 * ```
315 * @since v0.9.4
316 * @param destination Optional specific stream to unpipe
317 */
318 unpipe(destination?: NodeJS.WritableStream): this;
319 /**
320 * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the
321 * same as `readable.push(null)`, after which no more data can be written. The EOF
322 * signal is put at the end of the buffer and any buffered data will still be
323 * flushed.
324 *
325 * The `readable.unshift()` method pushes a chunk of data back into the internal
326 * buffer. This is useful in certain situations where a stream is being consumed by
327 * code that needs to "un-consume" some amount of data that it has optimistically
328 * pulled out of the source, so that the data can be passed on to some other party.
329 *
330 * The `stream.unshift(chunk)` method cannot be called after the `'end'` event
331 * has been emitted or a runtime error will be thrown.
332 *
333 * Developers using `stream.unshift()` often should consider switching to
334 * use of a `Transform` stream instead. See the `API for stream implementers` section for more information.
335 *
336 * ```js
337 * // Pull off a header delimited by \n\n.
338 * // Use unshift() if we get too much.
339 * // Call the callback with (error, header, stream).
340 * const { StringDecoder } = require('string_decoder');
341 * function parseHeader(stream, callback) {
342 * stream.on('error', callback);
343 * stream.on('readable', onReadable);
344 * const decoder = new StringDecoder('utf8');
345 * let header = '';
346 * function onReadable() {
347 * let chunk;
348 * while (null !== (chunk = stream.read())) {
349 * const str = decoder.write(chunk);
350 * if (str.match(/\n\n/)) {
351 * // Found the header boundary.
352 * const split = str.split(/\n\n/);
353 * header += split.shift();
354 * const remaining = split.join('\n\n');
355 * const buf = Buffer.from(remaining, 'utf8');
356 * stream.removeListener('error', callback);
357 * // Remove the 'readable' listener before unshifting.
358 * stream.removeListener('readable', onReadable);
359 * if (buf.length)
360 * stream.unshift(buf);
361 * // Now the body of the message can be read from the stream.
362 * callback(null, header, stream);
363 * } else {
364 * // Still reading the header.
365 * header += str;
366 * }
367 * }
368 * }
369 * }
370 * ```
371 *
372 * Unlike {@link push}, `stream.unshift(chunk)` will not
373 * end the reading process by resetting the internal reading state of the stream.
374 * This can cause unexpected results if `readable.unshift()` is called during a
375 * read (i.e. from within a {@link _read} implementation on a
376 * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately,
377 * however it is best to simply avoid calling `readable.unshift()` while in the
378 * process of performing a read.
379 * @since v0.9.11
380 * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array` or `null`. For object mode
381 * streams, `chunk` may be any JavaScript value.
382 * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`.
383 */
384 unshift(chunk: any, encoding?: BufferEncoding): void;
385 /**
386 * Prior to Node.js 0.10, streams did not implement the entire `stream` module API
387 * as it is currently defined. (See `Compatibility` for more information.)
388 *
389 * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable`
390 * stream that uses
391 * the old stream as its data source.
392 *
393 * It will rarely be necessary to use `readable.wrap()` but the method has been
394 * provided as a convenience for interacting with older Node.js applications and
395 * libraries.
396 *
397 * ```js
398 * const { OldReader } = require('./old-api-module.js');
399 * const { Readable } = require('stream');
400 * const oreader = new OldReader();
401 * const myReader = new Readable().wrap(oreader);
402 *
403 * myReader.on('readable', () => {
404 * myReader.read(); // etc.
405 * });
406 * ```
407 * @since v0.9.4
408 * @param stream An "old style" readable stream
409 */
410 wrap(stream: NodeJS.ReadableStream): this;
411 push(chunk: any, encoding?: BufferEncoding): boolean;
412 _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
413 /**
414 * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable
415 * stream will release any internal resources and subsequent calls to `push()`will be ignored.
416 *
417 * Once `destroy()` has been called any further calls will be a no-op and no
418 * further errors except from `_destroy()` may be emitted as `'error'`.
419 *
420 * Implementors should not override this method, but instead implement `readable._destroy()`.
421 * @since v8.0.0
422 * @param error Error which will be passed as payload in `'error'` event
423 */
424 destroy(error?: Error): this;
425 /**
426 * Event emitter
427 * The defined events on documents including:
428 * 1. close
429 * 2. data
430 * 3. end
431 * 4. error
432 * 5. pause
433 * 6. readable
434 * 7. resume
435 */
436 addListener(event: 'close', listener: () => void): this;
437 addListener(event: 'data', listener: (chunk: any) => void): this;
438 addListener(event: 'end', listener: () => void): this;
439 addListener(event: 'error', listener: (err: Error) => void): this;
440 addListener(event: 'pause', listener: () => void): this;
441 addListener(event: 'readable', listener: () => void): this;
442 addListener(event: 'resume', listener: () => void): this;
443 addListener(event: string | symbol, listener: (...args: any[]) => void): this;
444 emit(event: 'close'): boolean;
445 emit(event: 'data', chunk: any): boolean;
446 emit(event: 'end'): boolean;
447 emit(event: 'error', err: Error): boolean;
448 emit(event: 'pause'): boolean;
449 emit(event: 'readable'): boolean;
450 emit(event: 'resume'): boolean;
451 emit(event: string | symbol, ...args: any[]): boolean;
452 on(event: 'close', listener: () => void): this;
453 on(event: 'data', listener: (chunk: any) => void): this;
454 on(event: 'end', listener: () => void): this;
455 on(event: 'error', listener: (err: Error) => void): this;
456 on(event: 'pause', listener: () => void): this;
457 on(event: 'readable', listener: () => void): this;
458 on(event: 'resume', listener: () => void): this;
459 on(event: string | symbol, listener: (...args: any[]) => void): this;
460 once(event: 'close', listener: () => void): this;
461 once(event: 'data', listener: (chunk: any) => void): this;
462 once(event: 'end', listener: () => void): this;
463 once(event: 'error', listener: (err: Error) => void): this;
464 once(event: 'pause', listener: () => void): this;
465 once(event: 'readable', listener: () => void): this;
466 once(event: 'resume', listener: () => void): this;
467 once(event: string | symbol, listener: (...args: any[]) => void): this;
468 prependListener(event: 'close', listener: () => void): this;
469 prependListener(event: 'data', listener: (chunk: any) => void): this;
470 prependListener(event: 'end', listener: () => void): this;
471 prependListener(event: 'error', listener: (err: Error) => void): this;
472 prependListener(event: 'pause', listener: () => void): this;
473 prependListener(event: 'readable', listener: () => void): this;
474 prependListener(event: 'resume', listener: () => void): this;
475 prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
476 prependOnceListener(event: 'close', listener: () => void): this;
477 prependOnceListener(event: 'data', listener: (chunk: any) => void): this;
478 prependOnceListener(event: 'end', listener: () => void): this;
479 prependOnceListener(event: 'error', listener: (err: Error) => void): this;
480 prependOnceListener(event: 'pause', listener: () => void): this;
481 prependOnceListener(event: 'readable', listener: () => void): this;
482 prependOnceListener(event: 'resume', listener: () => void): this;
483 prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
484 removeListener(event: 'close', listener: () => void): this;
485 removeListener(event: 'data', listener: (chunk: any) => void): this;
486 removeListener(event: 'end', listener: () => void): this;
487 removeListener(event: 'error', listener: (err: Error) => void): this;
488 removeListener(event: 'pause', listener: () => void): this;
489 removeListener(event: 'readable', listener: () => void): this;
490 removeListener(event: 'resume', listener: () => void): this;
491 removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
492 [Symbol.asyncIterator](): AsyncIterableIterator<any>;
493 }
494 interface WritableOptions extends StreamOptions<Writable> {
495 decodeStrings?: boolean | undefined;
496 defaultEncoding?: BufferEncoding | undefined;
497 write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
498 writev?(
499 this: Writable,
500 chunks: Array<{
501 chunk: any;
502 encoding: BufferEncoding;
503 }>,
504 callback: (error?: Error | null) => void
505 ): void;
506 final?(this: Writable, callback: (error?: Error | null) => void): void;
507 }
508 /**
509 * @since v0.9.4
510 */
511 class Writable extends Stream implements NodeJS.WritableStream {
512 /**
513 * Is `true` if it is safe to call `writable.write()`, which means
514 * the stream has not been destroyed, errored or ended.
515 * @since v11.4.0
516 */
517 readonly writable: boolean;
518 /**
519 * Is `true` after `writable.end()` has been called. This property
520 * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead.
521 * @since v12.9.0
522 */
523 readonly writableEnded: boolean;
524 /**
525 * Is set to `true` immediately before the `'finish'` event is emitted.
526 * @since v12.6.0
527 */
528 readonly writableFinished: boolean;
529 /**
530 * Return the value of `highWaterMark` passed when creating this `Writable`.
531 * @since v9.3.0
532 */
533 readonly writableHighWaterMark: number;
534 /**
535 * This property contains the number of bytes (or objects) in the queue
536 * ready to be written. The value provides introspection data regarding
537 * the status of the `highWaterMark`.
538 * @since v9.4.0
539 */
540 readonly writableLength: number;
541 /**
542 * Getter for the property `objectMode` of a given `Writable` stream.
543 * @since v12.3.0
544 */
545 readonly writableObjectMode: boolean;
546 /**
547 * Number of times `writable.uncork()` needs to be
548 * called in order to fully uncork the stream.
549 * @since v13.2.0, v12.16.0
550 */
551 readonly writableCorked: number;
552 /**
553 * Is `true` after `writable.destroy()` has been called.
554 * @since v8.0.0
555 */
556 destroyed: boolean;
557 constructor(opts?: WritableOptions);
558 _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
559 _writev?(
560 chunks: Array<{
561 chunk: any;
562 encoding: BufferEncoding;
563 }>,
564 callback: (error?: Error | null) => void
565 ): void;
566 _construct?(callback: (error?: Error | null) => void): void;
567 _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
568 _final(callback: (error?: Error | null) => void): void;
569 /**
570 * The `writable.write()` method writes some data to the stream, and calls the
571 * supplied `callback` once the data has been fully handled. If an error
572 * occurs, the `callback` will be called with the error as its
573 * first argument. The `callback` is called asynchronously and before `'error'` is
574 * emitted.
575 *
576 * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`.
577 * If `false` is returned, further attempts to write data to the stream should
578 * stop until the `'drain'` event is emitted.
579 *
580 * While a stream is not draining, calls to `write()` will buffer `chunk`, and
581 * return false. Once all currently buffered chunks are drained (accepted for
582 * delivery by the operating system), the `'drain'` event will be emitted.
583 * It is recommended that once `write()` returns false, no more chunks be written
584 * until the `'drain'` event is emitted. While calling `write()` on a stream that
585 * is not draining is allowed, Node.js will buffer all written chunks until
586 * maximum memory usage occurs, at which point it will abort unconditionally.
587 * Even before it aborts, high memory usage will cause poor garbage collector
588 * performance and high RSS (which is not typically released back to the system,
589 * even after the memory is no longer required). Since TCP sockets may never
590 * drain if the remote peer does not read the data, writing a socket that is
591 * not draining may lead to a remotely exploitable vulnerability.
592 *
593 * Writing data while the stream is not draining is particularly
594 * problematic for a `Transform`, because the `Transform` streams are paused
595 * by default until they are piped or a `'data'` or `'readable'` event handler
596 * is added.
597 *
598 * If the data to be written can be generated or fetched on demand, it is
599 * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is
600 * possible to respect backpressure and avoid memory issues using the `'drain'` event:
601 *
602 * ```js
603 * function write(data, cb) {
604 * if (!stream.write(data)) {
605 * stream.once('drain', cb);
606 * } else {
607 * process.nextTick(cb);
608 * }
609 * }
610 *
611 * // Wait for cb to be called before doing any other write.
612 * write('hello', () => {
613 * console.log('Write completed, do more writes now.');
614 * });
615 * ```
616 *
617 * A `Writable` stream in object mode will always ignore the `encoding` argument.
618 * @since v0.9.4
619 * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any
620 * JavaScript value other than `null`.
621 * @param [encoding='utf8'] The encoding, if `chunk` is a string.
622 * @param callback Callback for when this chunk of data is flushed.
623 * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
624 */
625 write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean;
626 write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean;
627 /**
628 * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream.
629 * @since v0.11.15
630 * @param encoding The new default encoding
631 */
632 setDefaultEncoding(encoding: BufferEncoding): this;
633 /**
634 * Calling the `writable.end()` method signals that no more data will be written
635 * to the `Writable`. The optional `chunk` and `encoding` arguments allow one
636 * final additional chunk of data to be written immediately before closing the
637 * stream.
638 *
639 * Calling the {@link write} method after calling {@link end} will raise an error.
640 *
641 * ```js
642 * // Write 'hello, ' and then end with 'world!'.
643 * const fs = require('fs');
644 * const file = fs.createWriteStream('example.txt');
645 * file.write('hello, ');
646 * file.end('world!');
647 * // Writing more now is not allowed!
648 * ```
649 * @since v0.9.4
650 * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any
651 * JavaScript value other than `null`.
652 * @param encoding The encoding if `chunk` is a string
653 * @param callback Callback for when the stream is finished.
654 */
655 end(cb?: () => void): this;
656 end(chunk: any, cb?: () => void): this;
657 end(chunk: any, encoding: BufferEncoding, cb?: () => void): this;
658 /**
659 * The `writable.cork()` method forces all written data to be buffered in memory.
660 * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called.
661 *
662 * The primary intent of `writable.cork()` is to accommodate a situation in which
663 * several small chunks are written to the stream in rapid succession. Instead of
664 * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them
665 * all to `writable._writev()`, if present. This prevents a head-of-line blocking
666 * situation where data is being buffered while waiting for the first small chunk
667 * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput.
668 *
669 * See also: `writable.uncork()`, `writable._writev()`.
670 * @since v0.11.2
671 */
672 cork(): void;
673 /**
674 * The `writable.uncork()` method flushes all data buffered since {@link cork} was called.
675 *
676 * When using `writable.cork()` and `writable.uncork()` to manage the buffering
677 * of writes to a stream, it is recommended that calls to `writable.uncork()` be
678 * deferred using `process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event loop phase.
679 *
680 * ```js
681 * stream.cork();
682 * stream.write('some ');
683 * stream.write('data ');
684 * process.nextTick(() => stream.uncork());
685 * ```
686 *
687 * If the `writable.cork()` method is called multiple times on a stream, the
688 * same number of calls to `writable.uncork()` must be called to flush the buffered
689 * data.
690 *
691 * ```js
692 * stream.cork();
693 * stream.write('some ');
694 * stream.cork();
695 * stream.write('data ');
696 * process.nextTick(() => {
697 * stream.uncork();
698 * // The data will not be flushed until uncork() is called a second time.
699 * stream.uncork();
700 * });
701 * ```
702 *
703 * See also: `writable.cork()`.
704 * @since v0.11.2
705 */
706 uncork(): void;
707 /**
708 * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable
709 * stream has ended and subsequent calls to `write()` or `end()` will result in
710 * an `ERR_STREAM_DESTROYED` error.
711 * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error.
712 * Use `end()` instead of destroy if data should flush before close, or wait for
713 * the `'drain'` event before destroying the stream.
714 *
715 * Once `destroy()` has been called any further calls will be a no-op and no
716 * further errors except from `_destroy()` may be emitted as `'error'`.
717 *
718 * Implementors should not override this method,
719 * but instead implement `writable._destroy()`.
720 * @since v8.0.0
721 * @param error Optional, an error to emit with `'error'` event.
722 */
723 destroy(error?: Error): this;
724 /**
725 * Event emitter
726 * The defined events on documents including:
727 * 1. close
728 * 2. drain
729 * 3. error
730 * 4. finish
731 * 5. pipe
732 * 6. unpipe
733 */
734 addListener(event: 'close', listener: () => void): this;
735 addListener(event: 'drain', listener: () => void): this;
736 addListener(event: 'error', listener: (err: Error) => void): this;
737 addListener(event: 'finish', listener: () => void): this;
738 addListener(event: 'pipe', listener: (src: Readable) => void): this;
739 addListener(event: 'unpipe', listener: (src: Readable) => void): this;
740 addListener(event: string | symbol, listener: (...args: any[]) => void): this;
// Typed `emit` overloads for the known stream events; the last overload
// keeps EventEmitter's generic form for custom events.
emit(event: 'close'): boolean;
emit(event: 'drain'): boolean;
emit(event: 'error', err: Error): boolean;
emit(event: 'finish'): boolean;
emit(event: 'pipe', src: Readable): boolean;
emit(event: 'unpipe', src: Readable): boolean;
emit(event: string | symbol, ...args: any[]): boolean;
// Typed `on` overloads mirroring the event list documented on `addListener`.
on(event: 'close', listener: () => void): this;
on(event: 'drain', listener: () => void): this;
on(event: 'error', listener: (err: Error) => void): this;
on(event: 'finish', listener: () => void): this;
on(event: 'pipe', listener: (src: Readable) => void): this;
on(event: 'unpipe', listener: (src: Readable) => void): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;
// Typed `once` overloads mirroring the event list documented on `addListener`.
once(event: 'close', listener: () => void): this;
once(event: 'drain', listener: () => void): this;
once(event: 'error', listener: (err: Error) => void): this;
once(event: 'finish', listener: () => void): this;
once(event: 'pipe', listener: (src: Readable) => void): this;
once(event: 'unpipe', listener: (src: Readable) => void): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
// Typed `prependListener` overloads mirroring the event list on `addListener`.
prependListener(event: 'close', listener: () => void): this;
prependListener(event: 'drain', listener: () => void): this;
prependListener(event: 'error', listener: (err: Error) => void): this;
prependListener(event: 'finish', listener: () => void): this;
prependListener(event: 'pipe', listener: (src: Readable) => void): this;
prependListener(event: 'unpipe', listener: (src: Readable) => void): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
// Typed `prependOnceListener` overloads mirroring the event list on `addListener`.
prependOnceListener(event: 'close', listener: () => void): this;
prependOnceListener(event: 'drain', listener: () => void): this;
prependOnceListener(event: 'error', listener: (err: Error) => void): this;
prependOnceListener(event: 'finish', listener: () => void): this;
prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this;
prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
// Typed `removeListener` overloads mirroring the event list on `addListener`.
removeListener(event: 'close', listener: () => void): this;
removeListener(event: 'drain', listener: () => void): this;
removeListener(event: 'error', listener: (err: Error) => void): this;
removeListener(event: 'finish', listener: () => void): this;
removeListener(event: 'pipe', listener: (src: Readable) => void): this;
removeListener(event: 'unpipe', listener: (src: Readable) => void): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
783 }
interface DuplexOptions extends ReadableOptions, WritableOptions {
    /**
     * If set to `false`, the stream will automatically end the writable side
     * when the readable side ends.
     */
    allowHalfOpen?: boolean | undefined;
    /** Sets `objectMode` for the readable side only. */
    readableObjectMode?: boolean | undefined;
    /** Sets `objectMode` for the writable side only. */
    writableObjectMode?: boolean | undefined;
    /** Sets `highWaterMark` for the readable side only. */
    readableHighWaterMark?: number | undefined;
    /** Sets `highWaterMark` for the writable side only. */
    writableHighWaterMark?: number | undefined;
    /** Initial cork count for the writable side. */
    writableCorked?: number | undefined;
    // The hooks below are redeclared from ReadableOptions/WritableOptions so
    // that `this` is typed as Duplex inside user-supplied implementations.
    construct?(this: Duplex, callback: (error?: Error | null) => void): void;
    read?(this: Duplex, size: number): void;
    write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    writev?(
        this: Duplex,
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    final?(this: Duplex, callback: (error?: Error | null) => void): void;
    destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void;
}
/**
 * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces.
 *
 * Examples of `Duplex` streams include:
 *
 * * `TCP sockets`
 * * `zlib streams`
 * * `crypto streams`
 * @since v0.9.4
 */
class Duplex extends Readable implements Writable {
    // Writable-side state is redeclared here because the class can only
    // `extends` Readable; see the Writable declarations for the semantics.
    readonly writable: boolean;
    readonly writableEnded: boolean;
    readonly writableFinished: boolean;
    readonly writableHighWaterMark: number;
    readonly writableLength: number;
    readonly writableObjectMode: boolean;
    readonly writableCorked: number;
    /**
     * If `false` then the stream will automatically end the writable side when the
     * readable side ends. Set initially by the `allowHalfOpen` constructor option,
     * which defaults to `true`.
     *
     * This can be changed manually to change the half-open behavior of an existing
     * `Duplex` stream instance, but must be changed before the `'end'` event is
     * emitted.
     * @since v0.9.4
     */
    allowHalfOpen: boolean;
    constructor(opts?: DuplexOptions);
    /**
     * A utility method for creating duplex streams.
     *
     * - `Stream` converts writable stream into writable `Duplex` and readable stream
     *   to `Duplex`.
     * - `Blob` converts into readable `Duplex`.
     * - `string` converts into readable `Duplex`.
     * - `ArrayBuffer` converts into readable `Duplex`.
     * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`.
     * - `AsyncGeneratorFunction` converts into a readable/writable transform
     *   `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield
     *   `null`.
     * - `AsyncFunction` converts into a writable `Duplex`. Must return
     *   either `null` or `undefined`
     * - `Object ({ writable, readable })` converts `readable` and
     *   `writable` into `Stream` and then combines them into `Duplex` where the
     *   `Duplex` will write to the `writable` and read from the `readable`.
     * - `Promise` converts into readable `Duplex`. Value `null` is ignored.
     *
     * NOTE(review): the `Object` union member is the boxed wrapper type and
     * accepts any non-nullish value; kept as-is to preserve the published API.
     * @since v16.8.0
     */
    static from(src: Stream | Blob | ArrayBuffer | string | Iterable<any> | AsyncIterable<any> | AsyncGeneratorFunction | Promise<any> | Object): Duplex;
    // Writable implementation hooks and methods; see Writable for semantics.
    _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    _writev?(
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    _destroy(error: Error | null, callback: (error: Error | null) => void): void;
    _final(callback: (error?: Error | null) => void): void;
    write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean;
    write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean;
    setDefaultEncoding(encoding: BufferEncoding): this;
    end(cb?: () => void): this;
    end(chunk: any, cb?: () => void): this;
    end(chunk: any, encoding?: BufferEncoding, cb?: () => void): this;
    cork(): void;
    uncork(): void;
}
/**
 * Callback passed to `_transform`/`_flush`: invoke with an error, or with
 * `null`/`undefined` plus optionally the transformed data to push.
 */
type TransformCallback = (error?: Error | null, data?: any) => void;
interface TransformOptions extends DuplexOptions {
    // Hooks are redeclared from DuplexOptions so that `this` is typed as
    // Transform inside user-supplied implementations.
    construct?(this: Transform, callback: (error?: Error | null) => void): void;
    read?(this: Transform, size: number): void;
    write?(this: Transform, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    writev?(
        this: Transform,
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    final?(this: Transform, callback: (error?: Error | null) => void): void;
    destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void;
    /** Receives each written chunk; call `callback` when processing completes. */
    transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void;
    /** Called before the stream finishes, to emit any remaining buffered data. */
    flush?(this: Transform, callback: TransformCallback): void;
}
/**
 * Transform streams are `Duplex` streams where the output is in some way
 * related to the input. Like all `Duplex` streams, `Transform` streams
 * implement both the `Readable` and `Writable` interfaces.
 *
 * Examples of `Transform` streams include:
 *
 * * `zlib streams`
 * * `crypto streams`
 * @since v0.9.4
 */
class Transform extends Duplex {
    constructor(opts?: TransformOptions);
    /** Implementation hook: transform one chunk. Not to be called directly. */
    _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void;
    /** Implementation hook: flush remaining data before `'end'`. */
    _flush(callback: TransformCallback): void;
}
/**
 * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream
 * that simply passes the input bytes across to the output. Its purpose is
 * primarily for examples and testing, but there are some use cases where
 * `stream.PassThrough` is useful as a building block for novel sorts of streams.
 */
class PassThrough extends Transform {}
/**
 * Attaches an AbortSignal to a readable or writeable stream. This lets code
 * control stream destruction using an `AbortController`.
 *
 * Calling `abort` on the `AbortController` corresponding to the passed
 * `AbortSignal` will behave the same way as calling `.destroy(new AbortError())`
 * on the stream.
 *
 * ```js
 * const fs = require('fs');
 *
 * const controller = new AbortController();
 * const read = addAbortSignal(
 *   controller.signal,
 *   fs.createReadStream(('object.json'))
 * );
 * // Later, abort the operation closing the stream
 * controller.abort();
 * ```
 *
 * Or using an `AbortSignal` with a readable stream as an async iterable:
 *
 * ```js
 * const controller = new AbortController();
 * setTimeout(() => controller.abort(), 10_000); // set a timeout
 * const stream = addAbortSignal(
 *   controller.signal,
 *   fs.createReadStream(('object.json'))
 * );
 * (async () => {
 *   try {
 *     for await (const chunk of stream) {
 *       await process(chunk);
 *     }
 *   } catch (e) {
 *     if (e.name === 'AbortError') {
 *       // The operation was cancelled
 *     } else {
 *       throw e;
 *     }
 *   }
 * })();
 * ```
 * @since v15.4.0
 * @param signal A signal representing possible cancellation
 * @param stream a stream to attach a signal to
 */
function addAbortSignal<T extends Stream>(signal: AbortSignal, stream: T): T;
interface FinishedOptions extends Abortable {
    /** If set to `false`, a call to `emit('error', err)` is not treated as finished. */
    error?: boolean | undefined;
    /** When set to `false`, the callback will be called when the stream ends even though the stream might still be readable. */
    readable?: boolean | undefined;
    /** When set to `false`, the callback will be called when the stream ends even though the stream might still be writable. */
    writable?: boolean | undefined;
}
/**
 * A function to get notified when a stream is no longer readable, writable
 * or has experienced an error or a premature close event.
 *
 * ```js
 * const { finished } = require('stream');
 *
 * const rs = fs.createReadStream('archive.tar');
 *
 * finished(rs, (err) => {
 *   if (err) {
 *     console.error('Stream failed.', err);
 *   } else {
 *     console.log('Stream is done reading.');
 *   }
 * });
 *
 * rs.resume(); // Drain the stream.
 * ```
 *
 * Especially useful in error handling scenarios where a stream is destroyed
 * prematurely (like an aborted HTTP request), and will not emit `'end'` or `'finish'`.
 *
 * The `finished` API provides promise version:
 *
 * ```js
 * const { finished } = require('stream/promises');
 *
 * const rs = fs.createReadStream('archive.tar');
 *
 * async function run() {
 *   await finished(rs);
 *   console.log('Stream is done reading.');
 * }
 *
 * run().catch(console.error);
 * rs.resume(); // Drain the stream.
 * ```
 *
 * `stream.finished()` leaves dangling event listeners (in particular `'error'`,
 * `'end'`, `'finish'` and `'close'`) after `callback` has been
 * invoked. The reason for this is so that unexpected `'error'` events (due to
 * incorrect stream implementations) do not cause unexpected crashes.
 * If this is unwanted behavior then the returned cleanup function needs to be
 * invoked in the callback:
 *
 * ```js
 * const cleanup = finished(rs, (err) => {
 *   cleanup();
 *   // ...
 * });
 * ```
 * @since v10.0.0
 * @param stream A readable and/or writable stream.
 * @param callback A callback function that takes an optional error argument.
 * @return A cleanup function which removes all registered listeners.
 */
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
namespace finished {
    // Promise-returning signature picked up by `util.promisify(finished)`.
    function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>;
}
/** A function that, when invoked, produces the source of a pipeline. */
type PipelineSourceFunction<T> = () => Iterable<T> | AsyncIterable<T>;
/** Anything accepted as the first stage of `pipeline()`. */
type PipelineSource<T> = Iterable<T> | AsyncIterable<T> | NodeJS.ReadableStream | PipelineSourceFunction<T>;
/**
 * A middle stage of a pipeline: either a ReadWriteStream, or a function
 * receiving the previous stage as an async iterable and yielding values of `U`.
 */
type PipelineTransform<S extends PipelineTransformSource<any>, U> =
    | NodeJS.ReadWriteStream
    | ((source: S extends (...args: any[]) => Iterable<infer ST> | AsyncIterable<infer ST> ? AsyncIterable<ST> : S) => AsyncIterable<U>);
/** Any stage that can feed a transform: a source or a prior transform. */
type PipelineTransformSource<T> = PipelineSource<T> | PipelineTransform<any, T>;
type PipelineDestinationIterableFunction<T> = (source: AsyncIterable<T>) => AsyncIterable<any>;
type PipelineDestinationPromiseFunction<T, P> = (source: AsyncIterable<T>) => Promise<P>;
/** The final stage, derived from the element type of the previous stage. */
type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends PipelineTransformSource<infer ST>
    ? NodeJS.WritableStream | PipelineDestinationIterableFunction<ST> | PipelineDestinationPromiseFunction<ST, P>
    : never;
/**
 * Completion callback; receives the resolved value when the destination is a
 * promise-returning function, otherwise only the error argument.
 */
type PipelineCallback<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P>
    ? (err: NodeJS.ErrnoException | null, value: P) => void
    : (err: NodeJS.ErrnoException | null) => void;
/** Promise returned by the promisified pipeline, typed after the destination. */
type PipelinePromise<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? Promise<P> : Promise<void>;
interface PipelineOptions {
    // NOTE(review): `signal` is required here (the options object itself is
    // always optional at call sites). Later @types/node versions relax this
    // to `signal?: AbortSignal | undefined` — confirm before changing.
    signal: AbortSignal;
}
/**
 * A module method to pipe between streams and generators forwarding errors and
 * properly cleaning up and provide a callback when the pipeline is complete.
 *
 * ```js
 * const { pipeline } = require('stream');
 * const fs = require('fs');
 * const zlib = require('zlib');
 *
 * // Use the pipeline API to easily pipe a series of streams
 * // together and get notified when the pipeline is fully done.
 *
 * // A pipeline to gzip a potentially huge tar file efficiently:
 *
 * pipeline(
 *   fs.createReadStream('archive.tar'),
 *   zlib.createGzip(),
 *   fs.createWriteStream('archive.tar.gz'),
 *   (err) => {
 *     if (err) {
 *       console.error('Pipeline failed.', err);
 *     } else {
 *       console.log('Pipeline succeeded.');
 *     }
 *   }
 * );
 * ```
 *
 * The `pipeline` API provides a promise version, which can also
 * receive an options argument as the last parameter with a `signal` `AbortSignal`
 * property. When the signal is aborted, `destroy` will be called on the
 * underlying pipeline, with an `AbortError`.
 *
 * ```js
 * const { pipeline } = require('stream/promises');
 *
 * async function run() {
 *   await pipeline(
 *     fs.createReadStream('archive.tar'),
 *     zlib.createGzip(),
 *     fs.createWriteStream('archive.tar.gz')
 *   );
 *   console.log('Pipeline succeeded.');
 * }
 *
 * run().catch(console.error);
 * ```
 *
 * To use an `AbortSignal`, pass it inside an options object,
 * as the last argument:
 *
 * ```js
 * const { pipeline } = require('stream/promises');
 *
 * async function run() {
 *   const ac = new AbortController();
 *   const signal = ac.signal;
 *
 *   setTimeout(() => ac.abort(), 1);
 *   await pipeline(
 *     fs.createReadStream('archive.tar'),
 *     zlib.createGzip(),
 *     fs.createWriteStream('archive.tar.gz'),
 *     { signal },
 *   );
 * }
 *
 * run().catch(console.error); // AbortError
 * ```
 *
 * The `pipeline` API also supports async generators:
 *
 * ```js
 * const { pipeline } = require('stream/promises');
 * const fs = require('fs');
 *
 * async function run() {
 *   await pipeline(
 *     fs.createReadStream('lowercase.txt'),
 *     async function* (source, signal) {
 *       source.setEncoding('utf8'); // Work with strings rather than `Buffer`s.
 *       for await (const chunk of source) {
 *         yield await processChunk(chunk, { signal });
 *       }
 *     },
 *     fs.createWriteStream('uppercase.txt')
 *   );
 *   console.log('Pipeline succeeded.');
 * }
 *
 * run().catch(console.error);
 * ```
 *
 * Remember to handle the `signal` argument passed into the async generator.
 * Especially in the case where the async generator is the source for the
 * pipeline (i.e. first argument) or the pipeline will never complete.
 *
 * ```js
 * const { pipeline } = require('stream/promises');
 * const fs = require('fs');
 *
 * async function run() {
 *   await pipeline(
 *     async function * (signal) {
 *       await someLongRunningfn({ signal });
 *       yield 'asd';
 *     },
 *     fs.createWriteStream('uppercase.txt')
 *   );
 *   console.log('Pipeline succeeded.');
 * }
 *
 * run().catch(console.error);
 * ```
 *
 * `stream.pipeline()` will call `stream.destroy(err)` on all streams except:
 *
 * * `Readable` streams which have emitted `'end'` or `'close'`.
 * * `Writable` streams which have emitted `'finish'` or `'close'`.
 *
 * `stream.pipeline()` leaves dangling event listeners on the streams
 * after the `callback` has been invoked. In the case of reuse of streams after
 * failure, this can cause event listener leaks and swallowed errors.
 * @since v10.0.0
 * @param callback Called when the pipeline is fully done.
 */
// Generic overloads thread the element type from source through up to four
// transforms to the destination, so generator-style stages are fully typed.
function pipeline<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(
    source: A,
    destination: B,
    callback?: PipelineCallback<B>
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
    source: A,
    transform1: T1,
    destination: B,
    callback?: PipelineCallback<B>
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
    source: A,
    transform1: T1,
    transform2: T2,
    destination: B,
    callback?: PipelineCallback<B>
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<
    A extends PipelineSource<any>,
    T1 extends PipelineTransform<A, any>,
    T2 extends PipelineTransform<T1, any>,
    T3 extends PipelineTransform<T2, any>,
    B extends PipelineDestination<T3, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<
    A extends PipelineSource<any>,
    T1 extends PipelineTransform<A, any>,
    T2 extends PipelineTransform<T1, any>,
    T3 extends PipelineTransform<T2, any>,
    T4 extends PipelineTransform<T3, any>,
    B extends PipelineDestination<T4, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
// Untyped fallbacks: an array of plain streams, or a variadic stream list.
function pipeline(
    streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>,
    callback?: (err: NodeJS.ErrnoException | null) => void
): NodeJS.WritableStream;
function pipeline(
    stream1: NodeJS.ReadableStream,
    stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
    ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void)>
): NodeJS.WritableStream;
namespace pipeline {
    // Promise-returning signatures picked up by `util.promisify(pipeline)`;
    // they mirror the callback overloads above, with PipelineOptions replacing
    // the callback parameter.
    function __promisify__<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(source: A, destination: B, options?: PipelineOptions): PipelinePromise<B>;
    function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
        source: A,
        transform1: T1,
        destination: B,
        options?: PipelineOptions
    ): PipelinePromise<B>;
    function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
        source: A,
        transform1: T1,
        transform2: T2,
        destination: B,
        options?: PipelineOptions
    ): PipelinePromise<B>;
    function __promisify__<
        A extends PipelineSource<any>,
        T1 extends PipelineTransform<A, any>,
        T2 extends PipelineTransform<T1, any>,
        T3 extends PipelineTransform<T2, any>,
        B extends PipelineDestination<T3, any>
    >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise<B>;
    function __promisify__<
        A extends PipelineSource<any>,
        T1 extends PipelineTransform<A, any>,
        T2 extends PipelineTransform<T1, any>,
        T3 extends PipelineTransform<T2, any>,
        T4 extends PipelineTransform<T3, any>,
        B extends PipelineDestination<T4, any>
    >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise<B>;
    function __promisify__(streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>, options?: PipelineOptions): Promise<void>;
    function __promisify__(
        stream1: NodeJS.ReadableStream,
        stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
        ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | PipelineOptions>
    ): Promise<void>;
}
/**
 * Minimal interface for a pipe handle exposing close and event-loop
 * ref-counting controls.
 */
interface Pipe {
    close(): void;
    hasRef(): boolean;
    ref(): void;
    unref(): void;
}
1254
/**
 * Returns whether the stream has encountered an error.
 * @since v17.3.0
 * @param stream The stream to inspect.
 */
function isErrored(stream: Readable | Writable | NodeJS.ReadableStream | NodeJS.WritableStream): boolean;
1260
/**
 * Returns whether the stream is readable.
 * @since v17.4.0
 * @param stream The stream to inspect.
 */
function isReadable(stream: Readable | NodeJS.ReadableStream): boolean;
1266
/** Re-export of the `stream/promises` API. */
const promises: typeof streamPromises;
/** Re-export of the `stream/consumers` API. */
const consumers: typeof streamConsumers;
1269 }
1270 export = internal;
1271}
declare module 'node:stream' {
    // The 'node:' scheme exposes the same module object as 'stream'.
    import stream = require('stream');
    export = stream;
}