UNPKG

60.3 kB · TypeScript · View Raw
1/**
2 * A stream is an abstract interface for working with streaming data in Node.js.
3 * The `stream` module provides an API for implementing the stream interface.
4 *
5 * There are many stream objects provided by Node.js. For instance, a `request to an HTTP server` and `process.stdout` are both stream instances.
6 *
7 * Streams can be readable, writable, or both. All streams are instances of `EventEmitter`.
8 *
9 * To access the `stream` module:
10 *
11 * ```js
12 * const stream = require('stream');
13 * ```
14 *
15 * The `stream` module is useful for creating new types of stream instances. It is
16 * usually not necessary to use the `stream` module to consume streams.
17 * @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/stream.js)
18 */
19declare module 'stream' {
20 import { EventEmitter, Abortable } from 'node:events';
21 import * as streamPromises from 'node:stream/promises';
// Base of the stream implementation hierarchy; the public `Stream` class
// below extends it. Only `pipe` is declared at this level.
class internal extends EventEmitter {
    /**
     * Attaches a writable destination to this stream and returns the
     * destination so `pipe` calls can be chained.
     * @param destination The writable stream to pipe into.
     * @param options `end`: whether the destination should be ended when this
     * stream ends (per the Node.js docs the default is `true` — not visible
     * in this declaration; confirm against the runtime docs).
     */
    pipe<T extends NodeJS.WritableStream>(
        destination: T,
        options?: {
            end?: boolean | undefined;
        }
    ): T;
}
30 namespace internal {
// The legacy base stream class. Note it accepts `ReadableOptions` (not the
// generic `StreamOptions`) — this mirrors the runtime implementation.
class Stream extends internal {
    constructor(opts?: ReadableOptions);
}
34 interface StreamOptions<T extends Stream> extends Abortable {
35 emitClose?: boolean | undefined;
36 highWaterMark?: number | undefined;
37 objectMode?: boolean | undefined;
38 construct?(this: T, callback: (error?: Error | null) => void): void;
39 destroy?(this: T, error: Error | null, callback: (error: Error | null) => void): void;
40 autoDestroy?: boolean | undefined;
41 }
/**
 * Options accepted by the `Readable` constructor, in addition to the common
 * `StreamOptions`.
 */
interface ReadableOptions extends StreamOptions<Readable> {
    /** If set, chunks are decoded to strings of this encoding instead of being emitted as `Buffer`s. */
    encoding?: BufferEncoding | undefined;
    /** Implementation of `readable._read()`; called when data should be pulled from the underlying source. */
    read?(this: Readable, size: number): void;
}
/**
 * @since v0.9.4
 */
class Readable extends Stream implements NodeJS.ReadableStream {
    /**
     * A utility method for creating Readable Streams out of iterators.
     */
    static from(iterable: Iterable<any> | AsyncIterable<any>, options?: ReadableOptions): Readable;
    /**
     * Is `true` if it is safe to call `readable.read()`, which means
     * the stream has not been destroyed or emitted `'error'` or `'end'`.
     * @since v11.4.0
     */
    readable: boolean;
    /**
     * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method.
     * @since v12.7.0
     */
    readonly readableEncoding: BufferEncoding | null;
    /**
     * Becomes `true` when `'end'` event is emitted.
     * @since v12.9.0
     */
    readonly readableEnded: boolean;
    /**
     * This property reflects the current state of a `Readable` stream as described
     * in the `Three states` section.
     * @since v9.4.0
     */
    readonly readableFlowing: boolean | null;
    /**
     * Returns the value of `highWaterMark` passed when creating this `Readable`.
     * @since v9.3.0
     */
    readonly readableHighWaterMark: number;
    /**
     * This property contains the number of bytes (or objects) in the queue
     * ready to be read. The value provides introspection data regarding
     * the status of the `highWaterMark`.
     * @since v9.4.0
     */
    readonly readableLength: number;
    /**
     * Getter for the property `objectMode` of a given `Readable` stream.
     * @since v12.3.0
     */
    readonly readableObjectMode: boolean;
    /**
     * Is `true` after `readable.destroy()` has been called.
     * @since v8.0.0
     */
    destroyed: boolean;
    /** Creates a new `Readable`; see `ReadableOptions` for the supported options. */
    constructor(opts?: ReadableOptions);
    /** Optional implementation hook run during construction (see the `construct` option). */
    _construct?(callback: (error?: Error | null) => void): void;
    /** Implementation hook: pull up to `size` bytes/objects from the underlying source and `push()` them. */
    _read(size: number): void;
    /**
     * The `readable.read()` method pulls some data out of the internal buffer and
     * returns it. If no data available to be read, `null` is returned. By default,
     * the data will be returned as a `Buffer` object unless an encoding has been
     * specified using the `readable.setEncoding()` method or the stream is operating
     * in object mode.
     *
     * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which
     * case all of the data remaining in the internal
     * buffer will be returned.
     *
     * If the `size` argument is not specified, all of the data contained in the
     * internal buffer will be returned.
     *
     * The `size` argument must be less than or equal to 1 GiB.
     *
     * The `readable.read()` method should only be called on `Readable` streams
     * operating in paused mode. In flowing mode, `readable.read()` is called
     * automatically until the internal buffer is fully drained.
     *
     * ```js
     * const readable = getReadableStreamSomehow();
     *
     * // 'readable' may be triggered multiple times as data is buffered in
     * readable.on('readable', () => {
     *   let chunk;
     *   console.log('Stream is readable (new data received in buffer)');
     *   // Use a loop to make sure we read all currently available data
     *   while (null !== (chunk = readable.read())) {
     *     console.log(`Read ${chunk.length} bytes of data...`);
     *   }
     * });
     *
     * // 'end' will be triggered once when there is no more data available
     * readable.on('end', () => {
     *   console.log('Reached end of stream.');
     * });
     * ```
     *
     * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks
     * are not concatenated. A `while` loop is necessary to consume all data
     * currently in the buffer. When reading a large file `.read()` may return `null`,
     * having consumed all buffered content so far, but there is still more data to
     * come not yet buffered. In this case a new `'readable'` event will be emitted
     * when there is more data in the buffer. Finally the `'end'` event will be
     * emitted when there is no more data to come.
     *
     * Therefore to read a file's whole contents from a `readable`, it is necessary
     * to collect chunks across multiple `'readable'` events:
     *
     * ```js
     * const chunks = [];
     *
     * readable.on('readable', () => {
     *   let chunk;
     *   while (null !== (chunk = readable.read())) {
     *     chunks.push(chunk);
     *   }
     * });
     *
     * readable.on('end', () => {
     *   const content = chunks.join('');
     * });
     * ```
     *
     * A `Readable` stream in object mode will always return a single item from
     * a call to `readable.read(size)`, regardless of the value of the`size` argument.
     *
     * If the `readable.read()` method returns a chunk of data, a `'data'` event will
     * also be emitted.
     *
     * Calling {@link read} after the `'end'` event has
     * been emitted will return `null`. No runtime error will be raised.
     * @since v0.9.4
     * @param size Optional argument to specify how much data to read.
     */
    read(size?: number): any;
    /**
     * The `readable.setEncoding()` method sets the character encoding for
     * data read from the `Readable` stream.
     *
     * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data
     * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the
     * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal
     * string format.
     *
     * The `Readable` stream will properly handle multi-byte characters delivered
     * through the stream that would otherwise become improperly decoded if simply
     * pulled from the stream as `Buffer` objects.
     *
     * ```js
     * const readable = getReadableStreamSomehow();
     * readable.setEncoding('utf8');
     * readable.on('data', (chunk) => {
     *   assert.equal(typeof chunk, 'string');
     *   console.log('Got %d characters of string data:', chunk.length);
     * });
     * ```
     * @since v0.9.4
     * @param encoding The encoding to use.
     */
    setEncoding(encoding: BufferEncoding): this;
    /**
     * The `readable.pause()` method will cause a stream in flowing mode to stop
     * emitting `'data'` events, switching out of flowing mode. Any data that
     * becomes available will remain in the internal buffer.
     *
     * ```js
     * const readable = getReadableStreamSomehow();
     * readable.on('data', (chunk) => {
     *   console.log(`Received ${chunk.length} bytes of data.`);
     *   readable.pause();
     *   console.log('There will be no additional data for 1 second.');
     *   setTimeout(() => {
     *     console.log('Now data will start flowing again.');
     *     readable.resume();
     *   }, 1000);
     * });
     * ```
     *
     * The `readable.pause()` method has no effect if there is a `'readable'`event listener.
     * @since v0.9.4
     */
    pause(): this;
    /**
     * The `readable.resume()` method causes an explicitly paused `Readable` stream to
     * resume emitting `'data'` events, switching the stream into flowing mode.
     *
     * The `readable.resume()` method can be used to fully consume the data from a
     * stream without actually processing any of that data:
     *
     * ```js
     * getReadableStreamSomehow()
     *   .resume()
     *   .on('end', () => {
     *     console.log('Reached the end, but did not read anything.');
     *   });
     * ```
     *
     * The `readable.resume()` method has no effect if there is a `'readable'`event listener.
     * @since v0.9.4
     */
    resume(): this;
    /**
     * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most
     * typical cases, there will be no reason to
     * use this method directly.
     *
     * ```js
     * const readable = new stream.Readable();
     *
     * readable.isPaused(); // === false
     * readable.pause();
     * readable.isPaused(); // === true
     * readable.resume();
     * readable.isPaused(); // === false
     * ```
     * @since v0.11.14
     */
    isPaused(): boolean;
    /**
     * The `readable.unpipe()` method detaches a `Writable` stream previously attached
     * using the {@link pipe} method.
     *
     * If the `destination` is not specified, then _all_ pipes are detached.
     *
     * If the `destination` is specified, but no pipe is set up for it, then
     * the method does nothing.
     *
     * ```js
     * const fs = require('fs');
     * const readable = getReadableStreamSomehow();
     * const writable = fs.createWriteStream('file.txt');
     * // All the data from readable goes into 'file.txt',
     * // but only for the first second.
     * readable.pipe(writable);
     * setTimeout(() => {
     *   console.log('Stop writing to file.txt.');
     *   readable.unpipe(writable);
     *   console.log('Manually close the file stream.');
     *   writable.end();
     * }, 1000);
     * ```
     * @since v0.9.4
     * @param destination Optional specific stream to unpipe
     */
    unpipe(destination?: NodeJS.WritableStream): this;
    /**
     * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the
     * same as `readable.push(null)`, after which no more data can be written. The EOF
     * signal is put at the end of the buffer and any buffered data will still be
     * flushed.
     *
     * The `readable.unshift()` method pushes a chunk of data back into the internal
     * buffer. This is useful in certain situations where a stream is being consumed by
     * code that needs to "un-consume" some amount of data that it has optimistically
     * pulled out of the source, so that the data can be passed on to some other party.
     *
     * The `stream.unshift(chunk)` method cannot be called after the `'end'` event
     * has been emitted or a runtime error will be thrown.
     *
     * Developers using `stream.unshift()` often should consider switching to
     * use of a `Transform` stream instead. See the `API for stream implementers` section for more information.
     *
     * ```js
     * // Pull off a header delimited by \n\n.
     * // Use unshift() if we get too much.
     * // Call the callback with (error, header, stream).
     * const { StringDecoder } = require('string_decoder');
     * function parseHeader(stream, callback) {
     *   stream.on('error', callback);
     *   stream.on('readable', onReadable);
     *   const decoder = new StringDecoder('utf8');
     *   let header = '';
     *   function onReadable() {
     *     let chunk;
     *     while (null !== (chunk = stream.read())) {
     *       const str = decoder.write(chunk);
     *       if (str.match(/\n\n/)) {
     *         // Found the header boundary.
     *         const split = str.split(/\n\n/);
     *         header += split.shift();
     *         const remaining = split.join('\n\n');
     *         const buf = Buffer.from(remaining, 'utf8');
     *         stream.removeListener('error', callback);
     *         // Remove the 'readable' listener before unshifting.
     *         stream.removeListener('readable', onReadable);
     *         if (buf.length)
     *           stream.unshift(buf);
     *         // Now the body of the message can be read from the stream.
     *         callback(null, header, stream);
     *       } else {
     *         // Still reading the header.
     *         header += str;
     *       }
     *     }
     *   }
     * }
     * ```
     *
     * Unlike {@link push}, `stream.unshift(chunk)` will not
     * end the reading process by resetting the internal reading state of the stream.
     * This can cause unexpected results if `readable.unshift()` is called during a
     * read (i.e. from within a {@link _read} implementation on a
     * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately,
     * however it is best to simply avoid calling `readable.unshift()` while in the
     * process of performing a read.
     * @since v0.9.11
     * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array` or `null`. For object mode
     * streams, `chunk` may be any JavaScript value.
     * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`.
     */
    unshift(chunk: any, encoding?: BufferEncoding): void;
    /**
     * Prior to Node.js 0.10, streams did not implement the entire `stream` module API
     * as it is currently defined. (See `Compatibility` for more information.)
     *
     * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable`
     * stream that uses
     * the old stream as its data source.
     *
     * It will rarely be necessary to use `readable.wrap()` but the method has been
     * provided as a convenience for interacting with older Node.js applications and
     * libraries.
     *
     * ```js
     * const { OldReader } = require('./old-api-module.js');
     * const { Readable } = require('stream');
     * const oreader = new OldReader();
     * const myReader = new Readable().wrap(oreader);
     *
     * myReader.on('readable', () => {
     *   myReader.read(); // etc.
     * });
     * ```
     * @since v0.9.4
     * @param stream An "old style" readable stream
     */
    wrap(stream: NodeJS.ReadableStream): this;
    /**
     * Appends `chunk` to the internal read queue (used by `_read`
     * implementations); `push(null)` signals EOF. Returns whether more data
     * may be pushed — per the Node.js docs `false` indicates backpressure;
     * confirm against the runtime documentation.
     */
    push(chunk: any, encoding?: BufferEncoding): boolean;
    /** Implementation hook invoked by `destroy()` (see the `destroy` option on `StreamOptions`). */
    _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
    /**
     * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable
     * stream will release any internal resources and subsequent calls to `push()`will be ignored.
     *
     * Once `destroy()` has been called any further calls will be a no-op and no
     * further errors except from `_destroy()` may be emitted as `'error'`.
     *
     * Implementors should not override this method, but instead implement `readable._destroy()`.
     * @since v8.0.0
     * @param error Error which will be passed as payload in `'error'` event
     */
    destroy(error?: Error): void;
    /**
     * Event emitter
     * The defined events on documents including:
     * 1. close
     * 2. data
     * 3. end
     * 4. error
     * 5. pause
     * 6. readable
     * 7. resume
     */
    addListener(event: 'close', listener: () => void): this;
    addListener(event: 'data', listener: (chunk: any) => void): this;
    addListener(event: 'end', listener: () => void): this;
    addListener(event: 'error', listener: (err: Error) => void): this;
    addListener(event: 'pause', listener: () => void): this;
    addListener(event: 'readable', listener: () => void): this;
    addListener(event: 'resume', listener: () => void): this;
    addListener(event: string | symbol, listener: (...args: any[]) => void): this;
    emit(event: 'close'): boolean;
    emit(event: 'data', chunk: any): boolean;
    emit(event: 'end'): boolean;
    emit(event: 'error', err: Error): boolean;
    emit(event: 'pause'): boolean;
    emit(event: 'readable'): boolean;
    emit(event: 'resume'): boolean;
    emit(event: string | symbol, ...args: any[]): boolean;
    on(event: 'close', listener: () => void): this;
    on(event: 'data', listener: (chunk: any) => void): this;
    on(event: 'end', listener: () => void): this;
    on(event: 'error', listener: (err: Error) => void): this;
    on(event: 'pause', listener: () => void): this;
    on(event: 'readable', listener: () => void): this;
    on(event: 'resume', listener: () => void): this;
    on(event: string | symbol, listener: (...args: any[]) => void): this;
    once(event: 'close', listener: () => void): this;
    once(event: 'data', listener: (chunk: any) => void): this;
    once(event: 'end', listener: () => void): this;
    once(event: 'error', listener: (err: Error) => void): this;
    once(event: 'pause', listener: () => void): this;
    once(event: 'readable', listener: () => void): this;
    once(event: 'resume', listener: () => void): this;
    once(event: string | symbol, listener: (...args: any[]) => void): this;
    prependListener(event: 'close', listener: () => void): this;
    prependListener(event: 'data', listener: (chunk: any) => void): this;
    prependListener(event: 'end', listener: () => void): this;
    prependListener(event: 'error', listener: (err: Error) => void): this;
    prependListener(event: 'pause', listener: () => void): this;
    prependListener(event: 'readable', listener: () => void): this;
    prependListener(event: 'resume', listener: () => void): this;
    prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
    prependOnceListener(event: 'close', listener: () => void): this;
    prependOnceListener(event: 'data', listener: (chunk: any) => void): this;
    prependOnceListener(event: 'end', listener: () => void): this;
    prependOnceListener(event: 'error', listener: (err: Error) => void): this;
    prependOnceListener(event: 'pause', listener: () => void): this;
    prependOnceListener(event: 'readable', listener: () => void): this;
    prependOnceListener(event: 'resume', listener: () => void): this;
    prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
    removeListener(event: 'close', listener: () => void): this;
    removeListener(event: 'data', listener: (chunk: any) => void): this;
    removeListener(event: 'end', listener: () => void): this;
    removeListener(event: 'error', listener: (err: Error) => void): this;
    removeListener(event: 'pause', listener: () => void): this;
    removeListener(event: 'readable', listener: () => void): this;
    removeListener(event: 'resume', listener: () => void): this;
    removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
    /** Enables `for await...of` consumption of the stream's chunks. */
    [Symbol.asyncIterator](): AsyncIterableIterator<any>;
}
/**
 * Options accepted by the `Writable` constructor, in addition to the common
 * `StreamOptions`.
 */
interface WritableOptions extends StreamOptions<Writable> {
    /** Whether strings passed to `write()` should be decoded to `Buffer`s before reaching `_write()`. */
    decodeStrings?: boolean | undefined;
    /** Default encoding used when `write()` is called with a string and no explicit encoding. */
    defaultEncoding?: BufferEncoding | undefined;
    /** Implementation of `writable._write()`; handles a single chunk. */
    write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    /** Implementation of `writable._writev()`; handles multiple buffered chunks at once. */
    writev?(
        this: Writable,
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    /** Implementation of `writable._final()`; runs before the stream finishes. */
    final?(this: Writable, callback: (error?: Error | null) => void): void;
}
/**
 * @since v0.9.4
 */
class Writable extends Stream implements NodeJS.WritableStream {
    /**
     * Is `true` if it is safe to call `writable.write()`, which means
     * the stream has not been destroyed, errored or ended.
     * @since v11.4.0
     */
    readonly writable: boolean;
    /**
     * Is `true` after `writable.end()` has been called. This property
     * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead.
     * @since v12.9.0
     */
    readonly writableEnded: boolean;
    /**
     * Is set to `true` immediately before the `'finish'` event is emitted.
     * @since v12.6.0
     */
    readonly writableFinished: boolean;
    /**
     * Return the value of `highWaterMark` passed when creating this `Writable`.
     * @since v9.3.0
     */
    readonly writableHighWaterMark: number;
    /**
     * This property contains the number of bytes (or objects) in the queue
     * ready to be written. The value provides introspection data regarding
     * the status of the `highWaterMark`.
     * @since v9.4.0
     */
    readonly writableLength: number;
    /**
     * Getter for the property `objectMode` of a given `Writable` stream.
     * @since v12.3.0
     */
    readonly writableObjectMode: boolean;
    /**
     * Number of times `writable.uncork()` needs to be
     * called in order to fully uncork the stream.
     * @since v13.2.0, v12.16.0
     */
    readonly writableCorked: number;
    /**
     * Is `true` after `writable.destroy()` has been called.
     * @since v8.0.0
     */
    destroyed: boolean;
    /** Creates a new `Writable`; see `WritableOptions` for the supported options. */
    constructor(opts?: WritableOptions);
    /** Implementation hook: write a single chunk to the underlying resource, then invoke `callback`. */
    _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    /** Optional implementation hook: write multiple buffered chunks in one operation. */
    _writev?(
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    /** Optional implementation hook run during construction (see the `construct` option). */
    _construct?(callback: (error?: Error | null) => void): void;
    /** Implementation hook invoked by `destroy()` (see the `destroy` option on `StreamOptions`). */
    _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
    /** Implementation hook invoked before the stream closes, after all buffered data is flushed. */
    _final(callback: (error?: Error | null) => void): void;
    /**
     * The `writable.write()` method writes some data to the stream, and calls the
     * supplied `callback` once the data has been fully handled. If an error
     * occurs, the `callback` will be called with the error as its
     * first argument. The `callback` is called asynchronously and before `'error'` is
     * emitted.
     *
     * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`.
     * If `false` is returned, further attempts to write data to the stream should
     * stop until the `'drain'` event is emitted.
     *
     * While a stream is not draining, calls to `write()` will buffer `chunk`, and
     * return false. Once all currently buffered chunks are drained (accepted for
     * delivery by the operating system), the `'drain'` event will be emitted.
     * It is recommended that once `write()` returns false, no more chunks be written
     * until the `'drain'` event is emitted. While calling `write()` on a stream that
     * is not draining is allowed, Node.js will buffer all written chunks until
     * maximum memory usage occurs, at which point it will abort unconditionally.
     * Even before it aborts, high memory usage will cause poor garbage collector
     * performance and high RSS (which is not typically released back to the system,
     * even after the memory is no longer required). Since TCP sockets may never
     * drain if the remote peer does not read the data, writing a socket that is
     * not draining may lead to a remotely exploitable vulnerability.
     *
     * Writing data while the stream is not draining is particularly
     * problematic for a `Transform`, because the `Transform` streams are paused
     * by default until they are piped or a `'data'` or `'readable'` event handler
     * is added.
     *
     * If the data to be written can be generated or fetched on demand, it is
     * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is
     * possible to respect backpressure and avoid memory issues using the `'drain'` event:
     *
     * ```js
     * function write(data, cb) {
     *   if (!stream.write(data)) {
     *     stream.once('drain', cb);
     *   } else {
     *     process.nextTick(cb);
     *   }
     * }
     *
     * // Wait for cb to be called before doing any other write.
     * write('hello', () => {
     *   console.log('Write completed, do more writes now.');
     * });
     * ```
     *
     * A `Writable` stream in object mode will always ignore the `encoding` argument.
     * @since v0.9.4
     * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any
     * JavaScript value other than `null`.
     * @param [encoding='utf8'] The encoding, if `chunk` is a string.
     * @param callback Callback for when this chunk of data is flushed.
     * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
     */
    write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean;
    write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean;
    /**
     * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream.
     * @since v0.11.15
     * @param encoding The new default encoding
     */
    setDefaultEncoding(encoding: BufferEncoding): this;
    /**
     * Calling the `writable.end()` method signals that no more data will be written
     * to the `Writable`. The optional `chunk` and `encoding` arguments allow one
     * final additional chunk of data to be written immediately before closing the
     * stream.
     *
     * Calling the {@link write} method after calling {@link end} will raise an error.
     *
     * ```js
     * // Write 'hello, ' and then end with 'world!'.
     * const fs = require('fs');
     * const file = fs.createWriteStream('example.txt');
     * file.write('hello, ');
     * file.end('world!');
     * // Writing more now is not allowed!
     * ```
     * @since v0.9.4
     * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any
     * JavaScript value other than `null`.
     * @param encoding The encoding if `chunk` is a string
     * @param callback Callback for when the stream is finished.
     */
    end(cb?: () => void): void;
    end(chunk: any, cb?: () => void): void;
    end(chunk: any, encoding: BufferEncoding, cb?: () => void): void;
    /**
     * The `writable.cork()` method forces all written data to be buffered in memory.
     * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called.
     *
     * The primary intent of `writable.cork()` is to accommodate a situation in which
     * several small chunks are written to the stream in rapid succession. Instead of
     * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them
     * all to `writable._writev()`, if present. This prevents a head-of-line blocking
     * situation where data is being buffered while waiting for the first small chunk
     * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput.
     *
     * See also: `writable.uncork()`, `writable._writev()`.
     * @since v0.11.2
     */
    cork(): void;
    /**
     * The `writable.uncork()` method flushes all data buffered since {@link cork} was called.
     *
     * When using `writable.cork()` and `writable.uncork()` to manage the buffering
     * of writes to a stream, it is recommended that calls to `writable.uncork()` be
     * deferred using `process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event loop phase.
     *
     * ```js
     * stream.cork();
     * stream.write('some ');
     * stream.write('data ');
     * process.nextTick(() => stream.uncork());
     * ```
     *
     * If the `writable.cork()` method is called multiple times on a stream, the
     * same number of calls to `writable.uncork()` must be called to flush the buffered
     * data.
     *
     * ```js
     * stream.cork();
     * stream.write('some ');
     * stream.cork();
     * stream.write('data ');
     * process.nextTick(() => {
     *   stream.uncork();
     *   // The data will not be flushed until uncork() is called a second time.
     *   stream.uncork();
     * });
     * ```
     *
     * See also: `writable.cork()`.
     * @since v0.11.2
     */
    uncork(): void;
    /**
     * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the writable
     * stream has ended and subsequent calls to `write()` or `end()` will result in
     * an `ERR_STREAM_DESTROYED` error.
     * This is a destructive and immediate way to destroy a stream. Previous calls to`write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error.
     * Use `end()` instead of destroy if data should flush before close, or wait for
     * the `'drain'` event before destroying the stream.
     *
     * Once `destroy()` has been called any further calls will be a no-op and no
     * further errors except from `_destroy()` may be emitted as `'error'`.
     *
     * Implementors should not override this method,
     * but instead implement `writable._destroy()`.
     * @since v8.0.0
     * @param error Optional, an error to emit with `'error'` event.
     */
    destroy(error?: Error): void;
    /**
     * Event emitter
     * The defined events on documents including:
     * 1. close
     * 2. drain
     * 3. error
     * 4. finish
     * 5. pipe
     * 6. unpipe
     */
    addListener(event: 'close', listener: () => void): this;
    addListener(event: 'drain', listener: () => void): this;
    addListener(event: 'error', listener: (err: Error) => void): this;
    addListener(event: 'finish', listener: () => void): this;
    addListener(event: 'pipe', listener: (src: Readable) => void): this;
    addListener(event: 'unpipe', listener: (src: Readable) => void): this;
    addListener(event: string | symbol, listener: (...args: any[]) => void): this;
    emit(event: 'close'): boolean;
    emit(event: 'drain'): boolean;
    emit(event: 'error', err: Error): boolean;
    emit(event: 'finish'): boolean;
    emit(event: 'pipe', src: Readable): boolean;
    emit(event: 'unpipe', src: Readable): boolean;
    emit(event: string | symbol, ...args: any[]): boolean;
    on(event: 'close', listener: () => void): this;
    on(event: 'drain', listener: () => void): this;
    on(event: 'error', listener: (err: Error) => void): this;
    on(event: 'finish', listener: () => void): this;
    on(event: 'pipe', listener: (src: Readable) => void): this;
    on(event: 'unpipe', listener: (src: Readable) => void): this;
    on(event: string | symbol, listener: (...args: any[]) => void): this;
    once(event: 'close', listener: () => void): this;
    once(event: 'drain', listener: () => void): this;
    once(event: 'error', listener: (err: Error) => void): this;
    once(event: 'finish', listener: () => void): this;
    once(event: 'pipe', listener: (src: Readable) => void): this;
    once(event: 'unpipe', listener: (src: Readable) => void): this;
    once(event: string | symbol, listener: (...args: any[]) => void): this;
    prependListener(event: 'close', listener: () => void): this;
    prependListener(event: 'drain', listener: () => void): this;
    prependListener(event: 'error', listener: (err: Error) => void): this;
    prependListener(event: 'finish', listener: () => void): this;
    prependListener(event: 'pipe', listener: (src: Readable) => void): this;
    prependListener(event: 'unpipe', listener: (src: Readable) => void): this;
    prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
    prependOnceListener(event: 'close', listener: () => void): this;
    prependOnceListener(event: 'drain', listener: () => void): this;
    prependOnceListener(event: 'error', listener: (err: Error) => void): this;
    prependOnceListener(event: 'finish', listener: () => void): this;
    prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this;
    prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this;
    prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
    removeListener(event: 'close', listener: () => void): this;
    removeListener(event: 'drain', listener: () => void): this;
    removeListener(event: 'error', listener: (err: Error) => void): this;
    removeListener(event: 'finish', listener: () => void): this;
    removeListener(event: 'pipe', listener: (src: Readable) => void): this;
    removeListener(event: 'unpipe', listener: (src: Readable) => void): this;
    removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
}
    /**
     * Options accepted by the `Duplex` constructor; extends both the readable
     * and writable option sets and allows each side to be tuned independently.
     */
    interface DuplexOptions extends ReadableOptions, WritableOptions {
        // If set to false, the stream will automatically end the writable side
        // when the readable side ends.
        allowHalfOpen?: boolean | undefined;
        // Sets objectMode for the readable side only.
        readableObjectMode?: boolean | undefined;
        // Sets objectMode for the writable side only.
        writableObjectMode?: boolean | undefined;
        // Sets highWaterMark for the readable side only.
        readableHighWaterMark?: number | undefined;
        // Sets highWaterMark for the writable side only.
        writableHighWaterMark?: number | undefined;
        writableCorked?: number | undefined;
        construct?(this: Duplex, callback: (error?: Error | null) => void): void;
        read?(this: Duplex, size: number): void;
        write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
        writev?(
            this: Duplex,
            chunks: Array<{
                chunk: any;
                encoding: BufferEncoding;
            }>,
            callback: (error?: Error | null) => void
        ): void;
        final?(this: Duplex, callback: (error?: Error | null) => void): void;
        destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void;
    }
774 /**
775 * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces.
776 *
777 * Examples of `Duplex` streams include:
778 *
779 * * `TCP sockets`
780 * * `zlib streams`
781 * * `crypto streams`
782 * @since v0.9.4
783 */
784 class Duplex extends Readable implements Writable {
785 readonly writable: boolean;
786 readonly writableEnded: boolean;
787 readonly writableFinished: boolean;
788 readonly writableHighWaterMark: number;
789 readonly writableLength: number;
790 readonly writableObjectMode: boolean;
791 readonly writableCorked: number;
792 constructor(opts?: DuplexOptions);
793 _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
794 _writev?(
795 chunks: Array<{
796 chunk: any;
797 encoding: BufferEncoding;
798 }>,
799 callback: (error?: Error | null) => void
800 ): void;
801 _destroy(error: Error | null, callback: (error: Error | null) => void): void;
802 _final(callback: (error?: Error | null) => void): void;
803 write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean;
804 write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean;
805 setDefaultEncoding(encoding: BufferEncoding): this;
806 end(cb?: () => void): void;
807 end(chunk: any, cb?: () => void): void;
808 end(chunk: any, encoding?: BufferEncoding, cb?: () => void): void;
809 cork(): void;
810 uncork(): void;
811 }
    // Callback passed to `_transform`/`_flush`: report an error and/or supply
    // output `data` to be pushed to the readable side.
    type TransformCallback = (error?: Error | null, data?: any) => void;
    /**
     * Options accepted by the `Transform` constructor; in addition to the
     * `Duplex` options, the `transform` and `flush` implementations may be
     * supplied here instead of overriding `_transform`/`_flush`.
     */
    interface TransformOptions extends DuplexOptions {
        construct?(this: Transform, callback: (error?: Error | null) => void): void;
        read?(this: Transform, size: number): void;
        write?(this: Transform, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
        writev?(
            this: Transform,
            chunks: Array<{
                chunk: any;
                encoding: BufferEncoding;
            }>,
            callback: (error?: Error | null) => void
        ): void;
        final?(this: Transform, callback: (error?: Error | null) => void): void;
        destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void;
        // Implementation for `stream._transform()`.
        transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void;
        // Implementation for `stream._flush()`.
        flush?(this: Transform, callback: TransformCallback): void;
    }
830 /**
831 * Transform streams are `Duplex` streams where the output is in some way
832 * related to the input. Like all `Duplex` streams, `Transform` streams
833 * implement both the `Readable` and `Writable` interfaces.
834 *
835 * Examples of `Transform` streams include:
836 *
837 * * `zlib streams`
838 * * `crypto streams`
839 * @since v0.9.4
840 */
    class Transform extends Duplex {
        constructor(opts?: TransformOptions);
        // To be overridden by subclasses (or supplied via options.transform);
        // not intended to be called directly by user code.
        _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void;
        // Called before the stream closes so remaining buffered data can be flushed.
        _flush(callback: TransformCallback): void;
    }
    /**
     * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is
     * primarily for examples and testing, but there are some use cases where `stream.PassThrough` is useful as a building block for novel sorts of streams.
     */
    class PassThrough extends Transform {}
851 /**
     * Attaches an AbortSignal to a readable or writable stream. This lets code
853 * control stream destruction using an `AbortController`.
854 *
     * Calling `abort` on the `AbortController` corresponding to the passed `AbortSignal` will behave the same way as calling `.destroy(new AbortError())` on the stream.
856 *
857 * ```js
858 * const fs = require('fs');
859 *
860 * const controller = new AbortController();
861 * const read = addAbortSignal(
862 * controller.signal,
863 * fs.createReadStream(('object.json'))
864 * );
865 * // Later, abort the operation closing the stream
866 * controller.abort();
867 * ```
868 *
869 * Or using an `AbortSignal` with a readable stream as an async iterable:
870 *
871 * ```js
872 * const controller = new AbortController();
873 * setTimeout(() => controller.abort(), 10_000); // set a timeout
874 * const stream = addAbortSignal(
875 * controller.signal,
876 * fs.createReadStream(('object.json'))
877 * );
878 * (async () => {
879 * try {
880 * for await (const chunk of stream) {
881 * await process(chunk);
882 * }
883 * } catch (e) {
884 * if (e.name === 'AbortError') {
885 * // The operation was cancelled
886 * } else {
887 * throw e;
888 * }
889 * }
890 * })();
891 * ```
892 * @since v15.4.0
893 * @param signal A signal representing possible cancellation
894 * @param stream a stream to attach a signal to
895 */
    function addAbortSignal<T extends Stream>(signal: AbortSignal, stream: T): T; // returns the stream with the signal attached
    /** Options for `stream.finished()`; semantics per the Node.js stream docs. */
    interface FinishedOptions extends Abortable {
        // If set to false, a call to emit('error', err) is not treated as finished.
        error?: boolean | undefined;
        // If set to false, the callback fires when the stream ends even though
        // it might still be readable.
        readable?: boolean | undefined;
        // If set to false, the callback fires when the stream ends even though
        // it might still be writable.
        writable?: boolean | undefined;
    }
902 /**
903 * A function to get notified when a stream is no longer readable, writable
904 * or has experienced an error or a premature close event.
905 *
906 * ```js
907 * const { finished } = require('stream');
908 *
909 * const rs = fs.createReadStream('archive.tar');
910 *
911 * finished(rs, (err) => {
912 * if (err) {
913 * console.error('Stream failed.', err);
914 * } else {
915 * console.log('Stream is done reading.');
916 * }
917 * });
918 *
919 * rs.resume(); // Drain the stream.
920 * ```
921 *
922 * Especially useful in error handling scenarios where a stream is destroyed
     * prematurely (like an aborted HTTP request), and will not emit `'end'` or `'finish'`.
924 *
925 * The `finished` API provides promise version:
926 *
927 * ```js
928 * const { finished } = require('stream/promises');
929 *
930 * const rs = fs.createReadStream('archive.tar');
931 *
932 * async function run() {
933 * await finished(rs);
934 * console.log('Stream is done reading.');
935 * }
936 *
937 * run().catch(console.error);
938 * rs.resume(); // Drain the stream.
939 * ```
940 *
     * `stream.finished()` leaves dangling event listeners (in particular `'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been
942 * invoked. The reason for this is so that unexpected `'error'` events (due to
943 * incorrect stream implementations) do not cause unexpected crashes.
944 * If this is unwanted behavior then the returned cleanup function needs to be
945 * invoked in the callback:
946 *
947 * ```js
948 * const cleanup = finished(rs, (err) => {
949 * cleanup();
950 * // ...
951 * });
952 * ```
953 * @since v10.0.0
954 * @param stream A readable and/or writable stream.
955 * @param callback A callback function that takes an optional error argument.
956 * @return A cleanup function which removes all registered listeners.
957 */
    function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
    // Overload without an options object.
    function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
    namespace finished {
        // Typing for the promisified form (picked up by `util.promisify`).
        function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>;
    }
    // Function source: invoked to produce the (async) iterable fed into the pipeline.
    type PipelineSourceFunction<T> = () => Iterable<T> | AsyncIterable<T>;
    // Anything accepted as the first argument of `pipeline`.
    type PipelineSource<T> = Iterable<T> | AsyncIterable<T> | NodeJS.ReadableStream | PipelineSourceFunction<T>;
    // A middle stage: either a duplex stream or an async-generator-style
    // function receiving the previous stage's output as an async iterable.
    type PipelineTransform<S extends PipelineTransformSource<any>, U> =
        | NodeJS.ReadWriteStream
        | ((source: S extends (...args: any[]) => Iterable<infer ST> | AsyncIterable<infer ST> ? AsyncIterable<ST> : S) => AsyncIterable<U>);
    // Anything a transform stage may draw its input from.
    type PipelineTransformSource<T> = PipelineSource<T> | PipelineTransform<any, T>;
    type PipelineDestinationIterableFunction<T> = (source: AsyncIterable<T>) => AsyncIterable<any>;
    type PipelineDestinationPromiseFunction<T, P> = (source: AsyncIterable<T>) => Promise<P>;
    // Anything accepted as the last (non-callback) argument of `pipeline`.
    type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends PipelineTransformSource<infer ST>
        ? NodeJS.WritableStream | PipelineDestinationIterableFunction<ST> | PipelineDestinationPromiseFunction<ST, P>
        : never;
    // Completion callback: also receives the resolved value when the
    // destination is a promise-returning function.
    type PipelineCallback<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P>
        ? (err: NodeJS.ErrnoException | null, value: P) => void
        : (err: NodeJS.ErrnoException | null) => void;
    // Return type of the promisified pipeline, matching the destination kind.
    type PipelinePromise<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? Promise<P> : Promise<void>;
978 interface PipelineOptions {
979 signal: AbortSignal;
980 }
981 /**
982 * A module method to pipe between streams and generators forwarding errors and
983 * properly cleaning up and provide a callback when the pipeline is complete.
984 *
985 * ```js
986 * const { pipeline } = require('stream');
987 * const fs = require('fs');
988 * const zlib = require('zlib');
989 *
990 * // Use the pipeline API to easily pipe a series of streams
991 * // together and get notified when the pipeline is fully done.
992 *
993 * // A pipeline to gzip a potentially huge tar file efficiently:
994 *
995 * pipeline(
996 * fs.createReadStream('archive.tar'),
997 * zlib.createGzip(),
998 * fs.createWriteStream('archive.tar.gz'),
999 * (err) => {
1000 * if (err) {
1001 * console.error('Pipeline failed.', err);
1002 * } else {
1003 * console.log('Pipeline succeeded.');
1004 * }
1005 * }
1006 * );
1007 * ```
1008 *
1009 * The `pipeline` API provides a promise version, which can also
     * receive an options argument as the last parameter with a `signal` `<AbortSignal>` property. When the signal is aborted, `destroy` will be called on the underlying pipeline, with
     * an `AbortError`.
1012 *
1013 * ```js
1014 * const { pipeline } = require('stream/promises');
1015 *
1016 * async function run() {
1017 * await pipeline(
1018 * fs.createReadStream('archive.tar'),
1019 * zlib.createGzip(),
1020 * fs.createWriteStream('archive.tar.gz')
1021 * );
1022 * console.log('Pipeline succeeded.');
1023 * }
1024 *
1025 * run().catch(console.error);
1026 * ```
1027 *
1028 * To use an `AbortSignal`, pass it inside an options object,
1029 * as the last argument:
1030 *
1031 * ```js
1032 * const { pipeline } = require('stream/promises');
1033 *
1034 * async function run() {
1035 * const ac = new AbortController();
1036 * const options = {
1037 * signal: ac.signal,
1038 * };
1039 *
1040 * setTimeout(() => ac.abort(), 1);
1041 * await pipeline(
1042 * fs.createReadStream('archive.tar'),
1043 * zlib.createGzip(),
1044 * fs.createWriteStream('archive.tar.gz'),
1045 * options,
1046 * );
1047 * }
1048 *
1049 * run().catch(console.error); // AbortError
1050 * ```
1051 *
1052 * The `pipeline` API also supports async generators:
1053 *
1054 * ```js
1055 * const { pipeline } = require('stream/promises');
1056 * const fs = require('fs');
1057 *
1058 * async function run() {
1059 * await pipeline(
1060 * fs.createReadStream('lowercase.txt'),
1061 * async function* (source) {
1062 * source.setEncoding('utf8'); // Work with strings rather than `Buffer`s.
1063 * for await (const chunk of source) {
1064 * yield chunk.toUpperCase();
1065 * }
1066 * },
1067 * fs.createWriteStream('uppercase.txt')
1068 * );
1069 * console.log('Pipeline succeeded.');
1070 * }
1071 *
1072 * run().catch(console.error);
1073 * ```
1074 *
1075 * `stream.pipeline()` will call `stream.destroy(err)` on all streams except:
1076 *
1077 * * `Readable` streams which have emitted `'end'` or `'close'`.
1078 * * `Writable` streams which have emitted `'finish'` or `'close'`.
1079 *
1080 * `stream.pipeline()` leaves dangling event listeners on the streams
1081 * after the `callback` has been invoked. In the case of reuse of streams after
1082 * failure, this can cause event listener leaks and swallowed errors.
1083 * @since v10.0.0
1084 * @param callback Called when the pipeline is fully done.
1085 */
    function pipeline<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(
        source: A,
        destination: B,
        callback?: PipelineCallback<B>
    ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
    // Source + 1 transform.
    function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
        source: A,
        transform1: T1,
        destination: B,
        callback?: PipelineCallback<B>
    ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
    // Source + 2 transforms.
    function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
        source: A,
        transform1: T1,
        transform2: T2,
        destination: B,
        callback?: PipelineCallback<B>
    ): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
    // Source + 3 transforms.
    function pipeline<
        A extends PipelineSource<any>,
        T1 extends PipelineTransform<A, any>,
        T2 extends PipelineTransform<T1, any>,
        T3 extends PipelineTransform<T2, any>,
        B extends PipelineDestination<T3, any>
    >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
    // Source + 4 transforms.
    function pipeline<
        A extends PipelineSource<any>,
        T1 extends PipelineTransform<A, any>,
        T2 extends PipelineTransform<T1, any>,
        T3 extends PipelineTransform<T2, any>,
        T4 extends PipelineTransform<T3, any>,
        B extends PipelineDestination<T4, any>
    >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
    // Untyped fallbacks: an array of streams, or a variadic list of streams
    // with an optional trailing callback.
    function pipeline(
        streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>,
        callback?: (err: NodeJS.ErrnoException | null) => void
    ): NodeJS.WritableStream;
    function pipeline(
        stream1: NodeJS.ReadableStream,
        stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
        ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void)>
    ): NodeJS.WritableStream;
    namespace pipeline {
        // Typings for the promisified form (picked up by `util.promisify`);
        // these mirror the callback overloads above but take `PipelineOptions`
        // as the trailing argument and return a promise.
        function __promisify__<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(source: A, destination: B, options?: PipelineOptions): PipelinePromise<B>;
        function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
            source: A,
            transform1: T1,
            destination: B,
            options?: PipelineOptions
        ): PipelinePromise<B>;
        function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
            source: A,
            transform1: T1,
            transform2: T2,
            destination: B,
            options?: PipelineOptions
        ): PipelinePromise<B>;
        function __promisify__<
            A extends PipelineSource<any>,
            T1 extends PipelineTransform<A, any>,
            T2 extends PipelineTransform<T1, any>,
            T3 extends PipelineTransform<T2, any>,
            B extends PipelineDestination<T3, any>
        >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise<B>;
        function __promisify__<
            A extends PipelineSource<any>,
            T1 extends PipelineTransform<A, any>,
            T2 extends PipelineTransform<T1, any>,
            T3 extends PipelineTransform<T2, any>,
            T4 extends PipelineTransform<T3, any>,
            B extends PipelineDestination<T4, any>
        >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise<B>;
        // Untyped fallbacks matching the callback-form fallbacks above.
        function __promisify__(streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>, options?: PipelineOptions): Promise<void>;
        function __promisify__(
            stream1: NodeJS.ReadableStream,
            stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
            ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | PipelineOptions>
        ): Promise<void>;
    }
    /**
     * A closable handle with `ref`/`unref` reference-counting controls.
     * NOTE(review): only the shape visible here is documented; the concrete
     * semantics live in the native handle this models — confirm against callers.
     */
    interface Pipe {
        close(): void;
        hasRef(): boolean;
        ref(): void;
        unref(): void;
    }
    /** Promise-based variants of the stream utilities, as in `node:stream/promises`. */
    const promises: typeof streamPromises;
1172 }
1173 export = internal;
1174}
/**
 * `node:`-prefixed specifier for the same module: re-exports the `'stream'` binding.
 */
declare module 'node:stream' {
    import stream = require('stream');
    export = stream;
}