1 |
|
2 |
|
3 |
|
4 |
|
5 |
|
6 |
|
7 |
|
8 |
|
9 |
|
10 |
|
11 |
|
12 |
|
13 |
|
14 |
|
15 |
|
16 |
|
17 |
|
18 |
|
19 | declare module 'stream' {
|
20 | import { EventEmitter, Abortable } from 'node:events';
|
21 | import * as streamPromises from 'node:stream/promises';
|
22 | import * as streamConsumers from 'node:stream/consumers';
|
23 | class internal extends EventEmitter {
|
24 | pipe<T extends NodeJS.WritableStream>(
|
25 | destination: T,
|
26 | options?: {
|
27 | end?: boolean | undefined;
|
28 | }
|
29 | ): T;
|
30 | }
|
31 | namespace internal {
|
32 | class Stream extends internal {
|
33 | constructor(opts?: ReadableOptions);
|
34 | }
|
35 | interface StreamOptions<T extends Stream> extends Abortable {
|
36 | emitClose?: boolean | undefined;
|
37 | highWaterMark?: number | undefined;
|
38 | objectMode?: boolean | undefined;
|
39 | construct?(this: T, callback: (error?: Error | null) => void): void;
|
40 | destroy?(this: T, error: Error | null, callback: (error: Error | null) => void): void;
|
41 | autoDestroy?: boolean | undefined;
|
42 | }
|
43 | interface ReadableOptions extends StreamOptions<Readable> {
|
44 | encoding?: BufferEncoding | undefined;
|
45 | read?(this: Readable, size: number): void;
|
46 | }
|
47 | /**
|
48 | * @since v0.9.4
|
49 | */
|
50 | class Readable extends Stream implements NodeJS.ReadableStream {
|
51 | /**
|
52 | * A utility method for creating Readable Streams out of iterators.
|
53 | */
|
54 | static from(iterable: Iterable<any> | AsyncIterable<any>, options?: ReadableOptions): Readable;
|
55 | /**
|
56 | * Returns whether the stream has been read from or cancelled.
|
57 | * @since v16.8.0
|
58 | */
|
59 | static isDisturbed(stream: Readable | NodeJS.ReadableStream): boolean;
|
60 | /**
|
61 | * Returns whether the stream was destroyed or errored before emitting `'end'`.
|
62 | * @since v16.8.0
|
63 | * @experimental
|
64 | */
|
65 | readonly readableAborted: boolean;
|
66 | /**
|
67 | * Is `true` if it is safe to call `readable.read()`, which means
|
68 | * the stream has not been destroyed or emitted `'error'` or `'end'`.
|
69 | * @since v11.4.0
|
70 | */
|
71 | readable: boolean;
|
72 | /**
|
73 | * Returns whether `'data'` has been emitted.
|
74 | * @since v16.7.0, v14.18.0
|
75 | * @experimental
|
76 | */
|
77 | readonly readableDidRead: boolean;
|
78 | /**
|
79 | * Getter for the property `encoding` of a given `Readable` stream. The `encoding`property can be set using the `readable.setEncoding()` method.
|
80 | * @since v12.7.0
|
81 | */
|
82 | readonly readableEncoding: BufferEncoding | null;
|
83 | /**
|
84 | * Becomes `true` when `'end'` event is emitted.
|
85 | * @since v12.9.0
|
86 | */
|
87 | readonly readableEnded: boolean;
|
88 | /**
|
89 | * This property reflects the current state of a `Readable` stream as described
|
90 | * in the `Three states` section.
|
91 | * @since v9.4.0
|
92 | */
|
93 | readonly readableFlowing: boolean | null;
|
94 | /**
|
95 | * Returns the value of `highWaterMark` passed when creating this `Readable`.
|
96 | * @since v9.3.0
|
97 | */
|
98 | readonly readableHighWaterMark: number;
|
99 | /**
|
100 | * This property contains the number of bytes (or objects) in the queue
|
101 | * ready to be read. The value provides introspection data regarding
|
102 | * the status of the `highWaterMark`.
|
103 | * @since v9.4.0
|
104 | */
|
105 | readonly readableLength: number;
|
106 | /**
|
107 | * Getter for the property `objectMode` of a given `Readable` stream.
|
108 | * @since v12.3.0
|
109 | */
|
110 | readonly readableObjectMode: boolean;
|
111 | /**
|
112 | * Is `true` after `readable.destroy()` has been called.
|
113 | * @since v8.0.0
|
114 | */
|
115 | destroyed: boolean;
|
116 | constructor(opts?: ReadableOptions);
|
117 | _construct?(callback: (error?: Error | null) => void): void;
|
118 | _read(size: number): void;
|
119 | /**
|
120 | * The `readable.read()` method pulls some data out of the internal buffer and
|
121 | * returns it. If no data available to be read, `null` is returned. By default,
|
122 | * the data will be returned as a `Buffer` object unless an encoding has been
|
123 | * specified using the `readable.setEncoding()` method or the stream is operating
|
124 | * in object mode.
|
125 | *
|
126 | * The optional `size` argument specifies a specific number of bytes to read. If`size` bytes are not available to be read, `null` will be returned _unless_the stream has ended, in which
|
127 | * case all of the data remaining in the internal
|
128 | * buffer will be returned.
|
129 | *
|
130 | * If the `size` argument is not specified, all of the data contained in the
|
131 | * internal buffer will be returned.
|
132 | *
|
133 | * The `size` argument must be less than or equal to 1 GiB.
|
134 | *
|
135 | * The `readable.read()` method should only be called on `Readable` streams
|
136 | * operating in paused mode. In flowing mode, `readable.read()` is called
|
137 | * automatically until the internal buffer is fully drained.
|
138 | *
|
139 | * ```js
|
140 | * const readable = getReadableStreamSomehow();
|
141 | *
|
142 | * // 'readable' may be triggered multiple times as data is buffered in
|
143 | * readable.on('readable', () => {
|
144 | * let chunk;
|
145 | * console.log('Stream is readable (new data received in buffer)');
|
146 | *
|
147 | * while (null !== (chunk = readable.read())) {
|
148 | * console.log(`Read ${chunk.length} bytes of data...`);
|
149 | * }
|
150 | * });
|
151 | *
|
152 | * // 'end' will be triggered once when there is no more data available
|
153 | * readable.on('end', () => {
|
154 | * console.log('Reached end of stream.');
|
155 | * });
|
156 | * ```
|
157 | *
|
158 | * Each call to `readable.read()` returns a chunk of data, or `null`. The chunks
|
159 | * are not concatenated. A `while` loop is necessary to consume all data
|
160 | * currently in the buffer. When reading a large file `.read()` may return `null`,
|
161 | * having consumed all buffered content so far, but there is still more data to
|
162 | * come not yet buffered. In this case a new `'readable'` event will be emitted
|
163 | * when there is more data in the buffer. Finally the `'end'` event will be
|
164 | * emitted when there is no more data to come.
|
165 | *
|
166 | * Therefore to read a file's whole contents from a `readable`, it is necessary
|
167 | * to collect chunks across multiple `'readable'` events:
|
168 | *
|
169 | * ```js
|
170 | * const chunks = [];
|
171 | *
|
172 | * readable.on('readable', () => {
|
173 | * let chunk;
|
174 | * while (null !== (chunk = readable.read())) {
|
175 | * chunks.push(chunk);
|
176 | * }
|
177 | * });
|
178 | *
|
179 | * readable.on('end', () => {
|
180 | * const content = chunks.join('');
|
181 | * });
|
182 | * ```
|
183 | *
|
184 | * A `Readable` stream in object mode will always return a single item from
|
185 | * a call to `readable.read(size)`, regardless of the value of the`size` argument.
|
186 | *
|
187 | * If the `readable.read()` method returns a chunk of data, a `'data'` event will
|
188 | * also be emitted.
|
189 | *
|
190 | * Calling {@link read} after the `'end'` event has
|
191 | * been emitted will return `null`. No runtime error will be raised.
|
192 | * @since v0.9.4
|
193 | * @param size Optional argument to specify how much data to read.
|
194 | */
|
195 | read(size?: number): any;
|
196 | /**
|
197 | * The `readable.setEncoding()` method sets the character encoding for
|
198 | * data read from the `Readable` stream.
|
199 | *
|
200 | * By default, no encoding is assigned and stream data will be returned as`Buffer` objects. Setting an encoding causes the stream data
|
201 | * to be returned as strings of the specified encoding rather than as `Buffer`objects. For instance, calling `readable.setEncoding('utf8')` will cause the
|
202 | * output data to be interpreted as UTF-8 data, and passed as strings. Calling`readable.setEncoding('hex')` will cause the data to be encoded in hexadecimal
|
203 | * string format.
|
204 | *
|
205 | * The `Readable` stream will properly handle multi-byte characters delivered
|
206 | * through the stream that would otherwise become improperly decoded if simply
|
207 | * pulled from the stream as `Buffer` objects.
|
208 | *
|
209 | * ```js
|
210 | * const readable = getReadableStreamSomehow();
|
211 | * readable.setEncoding('utf8');
|
212 | * readable.on('data', (chunk) => {
|
213 | * assert.equal(typeof chunk, 'string');
|
214 | * console.log('Got %d characters of string data:', chunk.length);
|
215 | * });
|
216 | * ```
|
217 | * @since v0.9.4
|
218 | * @param encoding The encoding to use.
|
219 | */
|
220 | setEncoding(encoding: BufferEncoding): this;
|
221 | /**
|
222 | * The `readable.pause()` method will cause a stream in flowing mode to stop
|
223 | * emitting `'data'` events, switching out of flowing mode. Any data that
|
224 | * becomes available will remain in the internal buffer.
|
225 | *
|
226 | * ```js
|
227 | * const readable = getReadableStreamSomehow();
|
228 | * readable.on('data', (chunk) => {
|
229 | * console.log(`Received ${chunk.length} bytes of data.`);
|
230 | * readable.pause();
|
231 | * console.log('There will be no additional data for 1 second.');
|
232 | * setTimeout(() => {
|
233 | * console.log('Now data will start flowing again.');
|
234 | * readable.resume();
|
235 | * }, 1000);
|
236 | * });
|
237 | * ```
|
238 | *
|
239 | * The `readable.pause()` method has no effect if there is a `'readable'`event listener.
|
240 | * @since v0.9.4
|
241 | */
|
242 | pause(): this;
|
243 | /**
|
244 | * The `readable.resume()` method causes an explicitly paused `Readable` stream to
|
245 | * resume emitting `'data'` events, switching the stream into flowing mode.
|
246 | *
|
247 | * The `readable.resume()` method can be used to fully consume the data from a
|
248 | * stream without actually processing any of that data:
|
249 | *
|
250 | * ```js
|
251 | * getReadableStreamSomehow()
|
252 | * .resume()
|
253 | * .on('end', () => {
|
254 | * console.log('Reached the end, but did not read anything.');
|
255 | * });
|
256 | * ```
|
257 | *
|
258 | * The `readable.resume()` method has no effect if there is a `'readable'`event listener.
|
259 | * @since v0.9.4
|
260 | */
|
261 | resume(): this;
|
262 | /**
|
263 | * The `readable.isPaused()` method returns the current operating state of the`Readable`. This is used primarily by the mechanism that underlies the`readable.pipe()` method. In most
|
264 | * typical cases, there will be no reason to
|
265 | * use this method directly.
|
266 | *
|
267 | * ```js
|
268 | * const readable = new stream.Readable();
|
269 | *
|
270 | * readable.isPaused(); // === false
|
271 | * readable.pause();
|
272 | * readable.isPaused(); // === true
|
273 | * readable.resume();
|
274 | * readable.isPaused(); // === false
|
275 | * ```
|
276 | * @since v0.11.14
|
277 | */
|
278 | isPaused(): boolean;
|
279 | /**
|
280 | * The `readable.unpipe()` method detaches a `Writable` stream previously attached
|
281 | * using the {@link pipe} method.
|
282 | *
|
283 | * If the `destination` is not specified, then _all_ pipes are detached.
|
284 | *
|
285 | * If the `destination` is specified, but no pipe is set up for it, then
|
286 | * the method does nothing.
|
287 | *
|
288 | * ```js
|
289 | * const fs = require('fs');
|
290 | * const readable = getReadableStreamSomehow();
|
291 | * const writable = fs.createWriteStream('file.txt');
|
292 | * // All the data from readable goes into 'file.txt',
|
293 | * // but only for the first second.
|
294 | * readable.pipe(writable);
|
295 | * setTimeout(() => {
|
296 | * console.log('Stop writing to file.txt.');
|
297 | * readable.unpipe(writable);
|
298 | * console.log('Manually close the file stream.');
|
299 | * writable.end();
|
300 | * }, 1000);
|
301 | * ```
|
302 | * @since v0.9.4
|
303 | * @param destination Optional specific stream to unpipe
|
304 | */
|
305 | unpipe(destination?: NodeJS.WritableStream): this;
|
306 | /**
|
307 | * Passing `chunk` as `null` signals the end of the stream (EOF) and behaves the
|
308 | * same as `readable.push(null)`, after which no more data can be written. The EOF
|
309 | * signal is put at the end of the buffer and any buffered data will still be
|
310 | * flushed.
|
311 | *
|
312 | * The `readable.unshift()` method pushes a chunk of data back into the internal
|
313 | * buffer. This is useful in certain situations where a stream is being consumed by
|
314 | * code that needs to "un-consume" some amount of data that it has optimistically
|
315 | * pulled out of the source, so that the data can be passed on to some other party.
|
316 | *
|
317 | * The `stream.unshift(chunk)` method cannot be called after the `'end'` event
|
318 | * has been emitted or a runtime error will be thrown.
|
319 | *
|
320 | * Developers using `stream.unshift()` often should consider switching to
|
321 | * use of a `Transform` stream instead. See the `API for stream implementers` section for more information.
|
322 | *
|
323 | * ```js
|
324 | * // Pull off a header delimited by \n\n.
|
325 | * // Use unshift() if we get too much.
|
326 | * // Call the callback with (error, header, stream).
|
327 | * const { StringDecoder } = require('string_decoder');
|
328 | * function parseHeader(stream, callback) {
|
329 | * stream.on('error', callback);
|
330 | * stream.on('readable', onReadable);
|
331 | * const decoder = new StringDecoder('utf8');
|
332 | * let header = '';
|
333 | * function onReadable() {
|
334 | * let chunk;
|
335 | * while (null !== (chunk = stream.read())) {
|
336 | * const str = decoder.write(chunk);
|
337 | * if (str.match(/\n\n/)) {
|
338 | * // Found the header boundary.
|
339 | * const split = str.split(/\n\n/);
|
340 | * header += split.shift();
|
341 | * const remaining = split.join('\n\n');
|
342 | * const buf = Buffer.from(remaining, 'utf8');
|
343 | * stream.removeListener('error', callback);
|
344 | * // Remove the 'readable' listener before unshifting.
|
345 | * stream.removeListener('readable', onReadable);
|
346 | * if (buf.length)
|
347 | * stream.unshift(buf);
|
348 | * // Now the body of the message can be read from the stream.
|
349 | * callback(null, header, stream);
|
350 | * } else {
|
351 | * // Still reading the header.
|
352 | * header += str;
|
353 | * }
|
354 | * }
|
355 | * }
|
356 | * }
|
357 | * ```
|
358 | *
|
359 | * Unlike {@link push}, `stream.unshift(chunk)` will not
|
360 | * end the reading process by resetting the internal reading state of the stream.
|
361 | * This can cause unexpected results if `readable.unshift()` is called during a
|
362 | * read (i.e. from within a {@link _read} implementation on a
|
363 | * custom stream). Following the call to `readable.unshift()` with an immediate {@link push} will reset the reading state appropriately,
|
364 | * however it is best to simply avoid calling `readable.unshift()` while in the
|
365 | * process of performing a read.
|
366 | * @since v0.9.11
|
367 | * @param chunk Chunk of data to unshift onto the read queue. For streams not operating in object mode, `chunk` must be a string, `Buffer`, `Uint8Array` or `null`. For object mode
|
368 | * streams, `chunk` may be any JavaScript value.
|
369 | * @param encoding Encoding of string chunks. Must be a valid `Buffer` encoding, such as `'utf8'` or `'ascii'`.
|
370 | */
|
371 | unshift(chunk: any, encoding?: BufferEncoding): void;
|
372 | /**
|
373 | * Prior to Node.js 0.10, streams did not implement the entire `stream` module API
|
374 | * as it is currently defined. (See `Compatibility` for more information.)
|
375 | *
|
376 | * When using an older Node.js library that emits `'data'` events and has a {@link pause} method that is advisory only, the`readable.wrap()` method can be used to create a `Readable`
|
377 | * stream that uses
|
378 | * the old stream as its data source.
|
379 | *
|
380 | * It will rarely be necessary to use `readable.wrap()` but the method has been
|
381 | * provided as a convenience for interacting with older Node.js applications and
|
382 | * libraries.
|
383 | *
|
384 | * ```js
|
385 | * const { OldReader } = require('./old-api-module.js');
|
386 | * const { Readable } = require('stream');
|
387 | * const oreader = new OldReader();
|
388 | * const myReader = new Readable().wrap(oreader);
|
389 | *
|
390 | * myReader.on('readable', () => {
|
391 | * myReader.read();
|
392 | * });
|
393 | * ```
|
394 | * @since v0.9.4
|
395 | * @param stream An "old style" readable stream
|
396 | */
|
397 | wrap(stream: NodeJS.ReadableStream): this;
|
398 | push(chunk: any, encoding?: BufferEncoding): boolean;
|
399 | _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
|
400 | /**
|
401 | * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'`event (unless `emitClose` is set to `false`). After this call, the readable
|
402 | * stream will release any internal resources and subsequent calls to `push()`will be ignored.
|
403 | *
|
404 | * Once `destroy()` has been called any further calls will be a no-op and no
|
405 | * further errors except from `_destroy()` may be emitted as `'error'`.
|
406 | *
|
407 | * Implementors should not override this method, but instead implement `readable._destroy()`.
|
408 | * @since v8.0.0
|
409 | * @param error Error which will be passed as payload in `'error'` event
|
410 | */
|
411 | destroy(error?: Error): void;
|
412 | /**
|
413 | * Event emitter
|
414 | * The defined events on documents including:
|
415 | * 1. close
|
416 | * 2. data
|
417 | * 3. end
|
418 | * 4. error
|
419 | * 5. pause
|
420 | * 6. readable
|
421 | * 7. resume
|
422 | */
|
423 | addListener(event: 'close', listener: () => void): this;
|
424 | addListener(event: 'data', listener: (chunk: any) => void): this;
|
425 | addListener(event: 'end', listener: () => void): this;
|
426 | addListener(event: 'error', listener: (err: Error) => void): this;
|
427 | addListener(event: 'pause', listener: () => void): this;
|
428 | addListener(event: 'readable', listener: () => void): this;
|
429 | addListener(event: 'resume', listener: () => void): this;
|
430 | addListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
431 | emit(event: 'close'): boolean;
|
432 | emit(event: 'data', chunk: any): boolean;
|
433 | emit(event: 'end'): boolean;
|
434 | emit(event: 'error', err: Error): boolean;
|
435 | emit(event: 'pause'): boolean;
|
436 | emit(event: 'readable'): boolean;
|
437 | emit(event: 'resume'): boolean;
|
438 | emit(event: string | symbol, ...args: any[]): boolean;
|
439 | on(event: 'close', listener: () => void): this;
|
440 | on(event: 'data', listener: (chunk: any) => void): this;
|
441 | on(event: 'end', listener: () => void): this;
|
442 | on(event: 'error', listener: (err: Error) => void): this;
|
443 | on(event: 'pause', listener: () => void): this;
|
444 | on(event: 'readable', listener: () => void): this;
|
445 | on(event: 'resume', listener: () => void): this;
|
446 | on(event: string | symbol, listener: (...args: any[]) => void): this;
|
447 | once(event: 'close', listener: () => void): this;
|
448 | once(event: 'data', listener: (chunk: any) => void): this;
|
449 | once(event: 'end', listener: () => void): this;
|
450 | once(event: 'error', listener: (err: Error) => void): this;
|
451 | once(event: 'pause', listener: () => void): this;
|
452 | once(event: 'readable', listener: () => void): this;
|
453 | once(event: 'resume', listener: () => void): this;
|
454 | once(event: string | symbol, listener: (...args: any[]) => void): this;
|
455 | prependListener(event: 'close', listener: () => void): this;
|
456 | prependListener(event: 'data', listener: (chunk: any) => void): this;
|
457 | prependListener(event: 'end', listener: () => void): this;
|
458 | prependListener(event: 'error', listener: (err: Error) => void): this;
|
459 | prependListener(event: 'pause', listener: () => void): this;
|
460 | prependListener(event: 'readable', listener: () => void): this;
|
461 | prependListener(event: 'resume', listener: () => void): this;
|
462 | prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
463 | prependOnceListener(event: 'close', listener: () => void): this;
|
464 | prependOnceListener(event: 'data', listener: (chunk: any) => void): this;
|
465 | prependOnceListener(event: 'end', listener: () => void): this;
|
466 | prependOnceListener(event: 'error', listener: (err: Error) => void): this;
|
467 | prependOnceListener(event: 'pause', listener: () => void): this;
|
468 | prependOnceListener(event: 'readable', listener: () => void): this;
|
469 | prependOnceListener(event: 'resume', listener: () => void): this;
|
470 | prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
471 | removeListener(event: 'close', listener: () => void): this;
|
472 | removeListener(event: 'data', listener: (chunk: any) => void): this;
|
473 | removeListener(event: 'end', listener: () => void): this;
|
474 | removeListener(event: 'error', listener: (err: Error) => void): this;
|
475 | removeListener(event: 'pause', listener: () => void): this;
|
476 | removeListener(event: 'readable', listener: () => void): this;
|
477 | removeListener(event: 'resume', listener: () => void): this;
|
478 | removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
479 | [Symbol.asyncIterator](): AsyncIterableIterator<any>;
|
480 | }
|
481 | interface WritableOptions extends StreamOptions<Writable> {
|
482 | decodeStrings?: boolean | undefined;
|
483 | defaultEncoding?: BufferEncoding | undefined;
|
484 | write?(this: Writable, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
|
485 | writev?(
|
486 | this: Writable,
|
487 | chunks: Array<{
|
488 | chunk: any;
|
489 | encoding: BufferEncoding;
|
490 | }>,
|
491 | callback: (error?: Error | null) => void
|
492 | ): void;
|
493 | final?(this: Writable, callback: (error?: Error | null) => void): void;
|
494 | }
|
495 | /**
|
496 | * @since v0.9.4
|
497 | */
|
498 | class Writable extends Stream implements NodeJS.WritableStream {
|
499 | /**
|
500 | * Is `true` if it is safe to call `writable.write()`, which means
|
501 | * the stream has not been destroyed, errored or ended.
|
502 | * @since v11.4.0
|
503 | */
|
504 | readonly writable: boolean;
|
505 | /**
|
506 | * Is `true` after `writable.end()` has been called. This property
|
507 | * does not indicate whether the data has been flushed, for this use `writable.writableFinished` instead.
|
508 | * @since v12.9.0
|
509 | */
|
510 | readonly writableEnded: boolean;
|
511 | /**
|
512 | * Is set to `true` immediately before the `'finish'` event is emitted.
|
513 | * @since v12.6.0
|
514 | */
|
515 | readonly writableFinished: boolean;
|
516 | /**
|
517 | * Return the value of `highWaterMark` passed when creating this `Writable`.
|
518 | * @since v9.3.0
|
519 | */
|
520 | readonly writableHighWaterMark: number;
|
521 | /**
|
522 | * This property contains the number of bytes (or objects) in the queue
|
523 | * ready to be written. The value provides introspection data regarding
|
524 | * the status of the `highWaterMark`.
|
525 | * @since v9.4.0
|
526 | */
|
527 | readonly writableLength: number;
|
528 | /**
|
529 | * Getter for the property `objectMode` of a given `Writable` stream.
|
530 | * @since v12.3.0
|
531 | */
|
532 | readonly writableObjectMode: boolean;
|
533 | /**
|
534 | * Number of times `writable.uncork()` needs to be
|
535 | * called in order to fully uncork the stream.
|
536 | * @since v13.2.0, v12.16.0
|
537 | */
|
538 | readonly writableCorked: number;
|
539 | /**
|
540 | * Is `true` after `writable.destroy()` has been called.
|
541 | * @since v8.0.0
|
542 | */
|
543 | destroyed: boolean;
|
544 | constructor(opts?: WritableOptions);
|
545 | _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
|
546 | _writev?(
|
547 | chunks: Array<{
|
548 | chunk: any;
|
549 | encoding: BufferEncoding;
|
550 | }>,
|
551 | callback: (error?: Error | null) => void
|
552 | ): void;
|
553 | _construct?(callback: (error?: Error | null) => void): void;
|
554 | _destroy(error: Error | null, callback: (error?: Error | null) => void): void;
|
555 | _final(callback: (error?: Error | null) => void): void;
|
556 | /**
|
557 | * The `writable.write()` method writes some data to the stream, and calls the
|
558 | * supplied `callback` once the data has been fully handled. If an error
|
559 | * occurs, the `callback` will be called with the error as its
|
560 | * first argument. The `callback` is called asynchronously and before `'error'` is
|
561 | * emitted.
|
562 | *
|
563 | * The return value is `true` if the internal buffer is less than the`highWaterMark` configured when the stream was created after admitting `chunk`.
|
564 | * If `false` is returned, further attempts to write data to the stream should
|
565 | * stop until the `'drain'` event is emitted.
|
566 | *
|
567 | * While a stream is not draining, calls to `write()` will buffer `chunk`, and
|
568 | * return false. Once all currently buffered chunks are drained (accepted for
|
569 | * delivery by the operating system), the `'drain'` event will be emitted.
|
570 | * It is recommended that once `write()` returns false, no more chunks be written
|
571 | * until the `'drain'` event is emitted. While calling `write()` on a stream that
|
572 | * is not draining is allowed, Node.js will buffer all written chunks until
|
573 | * maximum memory usage occurs, at which point it will abort unconditionally.
|
574 | * Even before it aborts, high memory usage will cause poor garbage collector
|
575 | * performance and high RSS (which is not typically released back to the system,
|
576 | * even after the memory is no longer required). Since TCP sockets may never
|
577 | * drain if the remote peer does not read the data, writing a socket that is
|
578 | * not draining may lead to a remotely exploitable vulnerability.
|
579 | *
|
580 | * Writing data while the stream is not draining is particularly
|
581 | * problematic for a `Transform`, because the `Transform` streams are paused
|
582 | * by default until they are piped or a `'data'` or `'readable'` event handler
|
583 | * is added.
|
584 | *
|
585 | * If the data to be written can be generated or fetched on demand, it is
|
586 | * recommended to encapsulate the logic into a `Readable` and use {@link pipe}. However, if calling `write()` is preferred, it is
|
587 | * possible to respect backpressure and avoid memory issues using the `'drain'` event:
|
588 | *
|
589 | * ```js
|
590 | * function write(data, cb) {
|
591 | * if (!stream.write(data)) {
|
592 | * stream.once('drain', cb);
|
593 | * } else {
|
594 | * process.nextTick(cb);
|
595 | * }
|
596 | * }
|
597 | *
|
598 | * // Wait for cb to be called before doing any other write.
|
599 | * write('hello', () => {
|
600 | * console.log('Write completed, do more writes now.');
|
601 | * });
|
602 | * ```
|
603 | *
|
604 | * A `Writable` stream in object mode will always ignore the `encoding` argument.
|
605 | * @since v0.9.4
|
606 | * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any
|
607 | * JavaScript value other than `null`.
|
608 | * @param [encoding='utf8'] The encoding, if `chunk` is a string.
|
609 | * @param callback Callback for when this chunk of data is flushed.
|
610 | * @return `false` if the stream wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
|
611 | */
|
612 | write(chunk: any, callback?: (error: Error | null | undefined) => void): boolean;
|
613 | write(chunk: any, encoding: BufferEncoding, callback?: (error: Error | null | undefined) => void): boolean;
|
614 | /**
|
615 | * The `writable.setDefaultEncoding()` method sets the default `encoding` for a `Writable` stream.
|
616 | * @since v0.11.15
|
617 | * @param encoding The new default encoding
|
618 | */
|
619 | setDefaultEncoding(encoding: BufferEncoding): this;
|
620 | /**
|
621 | * Calling the `writable.end()` method signals that no more data will be written
|
622 | * to the `Writable`. The optional `chunk` and `encoding` arguments allow one
|
623 | * final additional chunk of data to be written immediately before closing the
|
624 | * stream.
|
625 | *
|
626 | * Calling the {@link write} method after calling {@link end} will raise an error.
|
627 | *
|
628 | * ```js
|
629 | * // Write 'hello, ' and then end with 'world!'.
|
630 | * const fs = require('fs');
|
631 | * const file = fs.createWriteStream('example.txt');
|
632 | * file.write('hello, ');
|
633 | * file.end('world!');
|
634 | * // Writing more now is not allowed!
|
635 | * ```
|
636 | * @since v0.9.4
|
637 | * @param chunk Optional data to write. For streams not operating in object mode, `chunk` must be a string, `Buffer` or `Uint8Array`. For object mode streams, `chunk` may be any
|
638 | * JavaScript value other than `null`.
|
639 | * @param encoding The encoding if `chunk` is a string
|
640 | * @param callback Callback for when the stream is finished.
|
641 | */
|
642 | end(cb?: () => void): void;
|
643 | end(chunk: any, cb?: () => void): void;
|
644 | end(chunk: any, encoding: BufferEncoding, cb?: () => void): void;
|
645 | /**
|
646 | * The `writable.cork()` method forces all written data to be buffered in memory.
|
647 | * The buffered data will be flushed when either the {@link uncork} or {@link end} methods are called.
|
648 | *
|
649 | * The primary intent of `writable.cork()` is to accommodate a situation in which
|
650 | * several small chunks are written to the stream in rapid succession. Instead of
|
651 | * immediately forwarding them to the underlying destination, `writable.cork()`buffers all the chunks until `writable.uncork()` is called, which will pass them
|
652 | * all to `writable._writev()`, if present. This prevents a head-of-line blocking
|
653 | * situation where data is being buffered while waiting for the first small chunk
|
654 | * to be processed. However, use of `writable.cork()` without implementing`writable._writev()` may have an adverse effect on throughput.
|
655 | *
|
656 | * See also: `writable.uncork()`, `writable._writev()`.
|
657 | * @since v0.11.2
|
658 | */
|
659 | cork(): void;
|
660 | /**
|
661 | * The `writable.uncork()` method flushes all data buffered since {@link cork} was called.
|
662 | *
|
663 | * When using `writable.cork()` and `writable.uncork()` to manage the buffering
|
664 | * of writes to a stream, it is recommended that calls to `writable.uncork()` be
|
665 | * deferred using `process.nextTick()`. Doing so allows batching of all`writable.write()` calls that occur within a given Node.js event loop phase.
|
666 | *
|
667 | * ```js
|
668 | * stream.cork();
|
669 | * stream.write('some ');
|
670 | * stream.write('data ');
|
671 | * process.nextTick(() => stream.uncork());
|
672 | * ```
|
673 | *
|
674 | * If the `writable.cork()` method is called multiple times on a stream, the
|
675 | * same number of calls to `writable.uncork()` must be called to flush the buffered
|
676 | * data.
|
677 | *
|
678 | * ```js
|
679 | * stream.cork();
|
680 | * stream.write('some ');
|
681 | * stream.cork();
|
682 | * stream.write('data ');
|
683 | * process.nextTick(() => {
|
684 | * stream.uncork();
|
685 | *
|
686 | * stream.uncork();
|
687 | * });
|
688 | * ```
|
689 | *
|
690 | * See also: `writable.cork()`.
|
691 | * @since v0.11.2
|
692 | */
|
693 | uncork(): void;
|
/**
 * Destroy the stream. Optionally emit an `'error'` event, and emit a `'close'` event (unless `emitClose` is set to `false`). After this call, the writable
 * stream has ended and subsequent calls to `write()` or `end()` will result in
 * an `ERR_STREAM_DESTROYED` error.
 * This is a destructive and immediate way to destroy a stream. Previous calls to `write()` may not have drained, and may trigger an `ERR_STREAM_DESTROYED` error.
 * Use `end()` instead of destroy if data should flush before close, or wait for
 * the `'drain'` event before destroying the stream.
 *
 * Once `destroy()` has been called any further calls will be a no-op and no
 * further errors except from `_destroy()` may be emitted as `'error'`.
 *
 * Implementors should not override this method,
 * but instead implement `writable._destroy()`.
 * @since v8.0.0
 * @param error Optional, an error to emit with `'error'` event.
 */
destroy(error?: Error): void;
|
/**
 * Typed event-emitter overloads for the writable side.
 * Narrows the listener signature for each documented event:
 * 1. close   — the stream and its underlying resources have been released
 * 2. drain   — it is appropriate to resume writing after `write()` returned `false`
 * 3. error   — listener receives the `Error`
 * 4. finish  — emitted after `end()` once all data has been flushed
 * 5. pipe    — listener receives the source `Readable` being piped in
 * 6. unpipe  — listener receives the source `Readable` that was unpiped
 * The trailing `string | symbol` overload preserves support for custom events.
 */
addListener(event: 'close', listener: () => void): this;
addListener(event: 'drain', listener: () => void): this;
addListener(event: 'error', listener: (err: Error) => void): this;
addListener(event: 'finish', listener: () => void): this;
addListener(event: 'pipe', listener: (src: Readable) => void): this;
addListener(event: 'unpipe', listener: (src: Readable) => void): this;
addListener(event: string | symbol, listener: (...args: any[]) => void): this;
emit(event: 'close'): boolean;
emit(event: 'drain'): boolean;
emit(event: 'error', err: Error): boolean;
emit(event: 'finish'): boolean;
emit(event: 'pipe', src: Readable): boolean;
emit(event: 'unpipe', src: Readable): boolean;
emit(event: string | symbol, ...args: any[]): boolean;
on(event: 'close', listener: () => void): this;
on(event: 'drain', listener: () => void): this;
on(event: 'error', listener: (err: Error) => void): this;
on(event: 'finish', listener: () => void): this;
on(event: 'pipe', listener: (src: Readable) => void): this;
on(event: 'unpipe', listener: (src: Readable) => void): this;
on(event: string | symbol, listener: (...args: any[]) => void): this;
once(event: 'close', listener: () => void): this;
once(event: 'drain', listener: () => void): this;
once(event: 'error', listener: (err: Error) => void): this;
once(event: 'finish', listener: () => void): this;
once(event: 'pipe', listener: (src: Readable) => void): this;
once(event: 'unpipe', listener: (src: Readable) => void): this;
once(event: string | symbol, listener: (...args: any[]) => void): this;
prependListener(event: 'close', listener: () => void): this;
prependListener(event: 'drain', listener: () => void): this;
prependListener(event: 'error', listener: (err: Error) => void): this;
prependListener(event: 'finish', listener: () => void): this;
prependListener(event: 'pipe', listener: (src: Readable) => void): this;
prependListener(event: 'unpipe', listener: (src: Readable) => void): this;
prependListener(event: string | symbol, listener: (...args: any[]) => void): this;
prependOnceListener(event: 'close', listener: () => void): this;
prependOnceListener(event: 'drain', listener: () => void): this;
prependOnceListener(event: 'error', listener: (err: Error) => void): this;
prependOnceListener(event: 'finish', listener: () => void): this;
prependOnceListener(event: 'pipe', listener: (src: Readable) => void): this;
prependOnceListener(event: 'unpipe', listener: (src: Readable) => void): this;
prependOnceListener(event: string | symbol, listener: (...args: any[]) => void): this;
removeListener(event: 'close', listener: () => void): this;
removeListener(event: 'drain', listener: () => void): this;
removeListener(event: 'error', listener: (err: Error) => void): this;
removeListener(event: 'finish', listener: () => void): this;
removeListener(event: 'pipe', listener: (src: Readable) => void): this;
removeListener(event: 'unpipe', listener: (src: Readable) => void): this;
removeListener(event: string | symbol, listener: (...args: any[]) => void): this;
|
770 | }
|
/**
 * Options accepted by the `Duplex` constructor. Combines `ReadableOptions` and
 * `WritableOptions`, adds per-side variants of shared settings, and re-declares
 * the lifecycle hooks so that `this` is narrowed to `Duplex` in user-supplied
 * implementations.
 */
interface DuplexOptions extends ReadableOptions, WritableOptions {
    // If false, the writable side is ended automatically when the readable side ends
    // (see the `allowHalfOpen` property documented on the `Duplex` class).
    allowHalfOpen?: boolean | undefined;
    // Per-side overrides of `objectMode`.
    readableObjectMode?: boolean | undefined;
    writableObjectMode?: boolean | undefined;
    // Per-side overrides of `highWaterMark`.
    readableHighWaterMark?: number | undefined;
    writableHighWaterMark?: number | undefined;
    // presumably the initial cork count for the writable side — TODO confirm against Node docs
    writableCorked?: number | undefined;
    // Lifecycle hooks re-declared with `this: Duplex` (the inherited versions type
    // `this` as Readable/Writable respectively).
    construct?(this: Duplex, callback: (error?: Error | null) => void): void;
    read?(this: Duplex, size: number): void;
    write?(this: Duplex, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    writev?(
        this: Duplex,
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    final?(this: Duplex, callback: (error?: Error | null) => void): void;
    destroy?(this: Duplex, error: Error | null, callback: (error: Error | null) => void): void;
}
|
792 | /**
|
793 | * Duplex streams are streams that implement both the `Readable` and `Writable` interfaces.
|
794 | *
|
795 | * Examples of `Duplex` streams include:
|
796 | *
|
797 | * * `TCP sockets`
|
798 | * * `zlib streams`
|
799 | * * `crypto streams`
|
800 | * @since v0.9.4
|
801 | */
|
802 | class Duplex extends Readable implements Writable {
|
803 | readonly writable: boolean;
|
804 | readonly writableEnded: boolean;
|
805 | readonly writableFinished: boolean;
|
806 | readonly writableHighWaterMark: number;
|
807 | readonly writableLength: number;
|
808 | readonly writableObjectMode: boolean;
|
809 | readonly writableCorked: number;
|
810 | /**
|
811 | * If `false` then the stream will automatically end the writable side when the
|
812 | * readable side ends. Set initially by the `allowHalfOpen` constructor option,
|
813 | * which defaults to `false`.
|
814 | *
|
815 | * This can be changed manually to change the half-open behavior of an existing`Duplex` stream instance, but must be changed before the `'end'` event is
|
816 | * emitted.
|
817 | * @since v0.9.4
|
818 | */
|
819 | allowHalfOpen: boolean;
|
820 | constructor(opts?: DuplexOptions);
|
821 | /**
|
822 | * A utility method for creating duplex streams.
|
823 | *
|
824 | * - `Stream` converts writable stream into writable `Duplex` and readable stream
|
825 | * to `Duplex`.
|
826 | * - `Blob` converts into readable `Duplex`.
|
827 | * - `string` converts into readable `Duplex`.
|
828 | * - `ArrayBuffer` converts into readable `Duplex`.
|
829 | * - `AsyncIterable` converts into a readable `Duplex`. Cannot yield `null`.
|
830 | * - `AsyncGeneratorFunction` converts into a readable/writable transform
|
831 | * `Duplex`. Must take a source `AsyncIterable` as first parameter. Cannot yield
|
832 | * `null`.
|
833 | * - `AsyncFunction` converts into a writable `Duplex`. Must return
|
834 | * either `null` or `undefined`
|
835 | * - `Object ({ writable, readable })` converts `readable` and
|
836 | * `writable` into `Stream` and then combines them into `Duplex` where the
|
837 | * `Duplex` will write to the `writable` and read from the `readable`.
|
838 | * - `Promise` converts into readable `Duplex`. Value `null` is ignored.
|
839 | *
|
840 | * @since v16.8.0
|
841 | */
|
842 | static from(src: Stream | Blob | ArrayBuffer | string | Iterable<any> | AsyncIterable<any> | AsyncGeneratorFunction | Promise<any> | Object): Duplex;
|
843 | _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
|
844 | _writev?(
|
845 | chunks: Array<{
|
846 | chunk: any;
|
847 | encoding: BufferEncoding;
|
848 | }>,
|
849 | callback: (error?: Error | null) => void
|
850 | ): void;
|
851 | _destroy(error: Error | null, callback: (error: Error | null) => void): void;
|
852 | _final(callback: (error?: Error | null) => void): void;
|
853 | write(chunk: any, encoding?: BufferEncoding, cb?: (error: Error | null | undefined) => void): boolean;
|
854 | write(chunk: any, cb?: (error: Error | null | undefined) => void): boolean;
|
855 | setDefaultEncoding(encoding: BufferEncoding): this;
|
856 | end(cb?: () => void): void;
|
857 | end(chunk: any, cb?: () => void): void;
|
858 | end(chunk: any, encoding?: BufferEncoding, cb?: () => void): void;
|
859 | cork(): void;
|
860 | uncork(): void;
|
861 | }
|
// Callback invoked by `_transform`/`_flush`: pass an error, and/or the (optional)
// transformed data to push to the readable side.
type TransformCallback = (error?: Error | null, data?: any) => void;
/**
 * Options accepted by the `Transform` constructor. Extends `DuplexOptions`,
 * re-declares the lifecycle hooks so that `this` is narrowed to `Transform`,
 * and adds the `transform`/`flush` hooks specific to transform streams.
 */
interface TransformOptions extends DuplexOptions {
    construct?(this: Transform, callback: (error?: Error | null) => void): void;
    read?(this: Transform, size: number): void;
    write?(this: Transform, chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void): void;
    writev?(
        this: Transform,
        chunks: Array<{
            chunk: any;
            encoding: BufferEncoding;
        }>,
        callback: (error?: Error | null) => void
    ): void;
    final?(this: Transform, callback: (error?: Error | null) => void): void;
    destroy?(this: Transform, error: Error | null, callback: (error: Error | null) => void): void;
    // Alternative to subclassing and overriding `_transform`.
    transform?(this: Transform, chunk: any, encoding: BufferEncoding, callback: TransformCallback): void;
    // Alternative to subclassing and overriding `_flush`.
    flush?(this: Transform, callback: TransformCallback): void;
}
|
/**
 * Transform streams are `Duplex` streams where the output is in some way
 * related to the input. Like all `Duplex` streams, `Transform` streams
 * implement both the `Readable` and `Writable` interfaces.
 *
 * Examples of `Transform` streams include:
 *
 * * `zlib streams`
 * * `crypto streams`
 * @since v0.9.4
 */
class Transform extends Duplex {
    constructor(opts?: TransformOptions);
    // Internal hook: receives each written chunk; results are delivered via `callback`
    // (can also be supplied as the `transform` constructor option instead of overriding).
    _transform(chunk: any, encoding: BufferEncoding, callback: TransformCallback): void;
    // Internal hook: called before the stream finishes, giving a chance to emit
    // trailing data via `callback`.
    _flush(callback: TransformCallback): void;
}
|
/**
 * The `stream.PassThrough` class is a trivial implementation of a `Transform` stream that simply passes the input bytes across to the output. Its purpose is
 * primarily for examples and testing, but there are some use cases where `stream.PassThrough` is useful as a building block for novel sorts of streams.
 */
class PassThrough extends Transform {}
|
/**
 * Attaches an AbortSignal to a readable or writeable stream. This lets code
 * control stream destruction using an `AbortController`.
 *
 * Calling `abort` on the `AbortController` corresponding to the passed `AbortSignal` will behave the same way as calling `.destroy(new AbortError())` on the stream.
 *
 * ```js
 * const fs = require('fs');
 *
 * const controller = new AbortController();
 * const read = addAbortSignal(
 *   controller.signal,
 *   fs.createReadStream(('object.json'))
 * );
 * // Later, abort the operation closing the stream
 * controller.abort();
 * ```
 *
 * Or using an `AbortSignal` with a readable stream as an async iterable:
 *
 * ```js
 * const controller = new AbortController();
 * setTimeout(() => controller.abort(), 10_000); // set a timeout
 * const stream = addAbortSignal(
 *   controller.signal,
 *   fs.createReadStream(('object.json'))
 * );
 * (async () => {
 *   try {
 *     for await (const chunk of stream) {
 *       await process(chunk);
 *     }
 *   } catch (e) {
 *     if (e.name === 'AbortError') {
 *       // The operation was cancelled
 *     } else {
 *       throw e;
 *     }
 *   }
 * })();
 * ```
 * @since v15.4.0
 * @param signal A signal representing possible cancellation
 * @param stream a stream to attach a signal to
 */
function addAbortSignal<T extends Stream>(signal: AbortSignal, stream: T): T;
|
/**
 * Options for {@link finished}. Extends `Abortable`, so an `AbortSignal` may be
 * supplied to cancel the wait.
 */
interface FinishedOptions extends Abortable {
    // NOTE(review): per Node docs, `false` here means an 'error' emission is not
    // treated as finished — confirm against the runtime version targeted.
    error?: boolean | undefined;
    // When `false`, the callback may fire on writable completion even if the
    // stream is still readable (and vice versa for `writable`) — TODO confirm.
    readable?: boolean | undefined;
    writable?: boolean | undefined;
}
|
/**
 * A function to get notified when a stream is no longer readable, writable
 * or has experienced an error or a premature close event.
 *
 * ```js
 * const { finished } = require('stream');
 *
 * const rs = fs.createReadStream('archive.tar');
 *
 * finished(rs, (err) => {
 *   if (err) {
 *     console.error('Stream failed.', err);
 *   } else {
 *     console.log('Stream is done reading.');
 *   }
 * });
 *
 * rs.resume(); // Drain the stream.
 * ```
 *
 * Especially useful in error handling scenarios where a stream is destroyed
 * prematurely (like an aborted HTTP request), and will not emit `'end'` or `'finish'`.
 *
 * The `finished` API provides promise version:
 *
 * ```js
 * const { finished } = require('stream/promises');
 *
 * const rs = fs.createReadStream('archive.tar');
 *
 * async function run() {
 *   await finished(rs);
 *   console.log('Stream is done reading.');
 * }
 *
 * run().catch(console.error);
 * rs.resume(); // Drain the stream.
 * ```
 *
 * `stream.finished()` leaves dangling event listeners (in particular `'error'`, `'end'`, `'finish'` and `'close'`) after `callback` has been
 * invoked. The reason for this is so that unexpected `'error'` events (due to
 * incorrect stream implementations) do not cause unexpected crashes.
 * If this is unwanted behavior then the returned cleanup function needs to be
 * invoked in the callback:
 *
 * ```js
 * const cleanup = finished(rs, (err) => {
 *   cleanup();
 *   // ...
 * });
 * ```
 * @since v10.0.0
 * @param stream A readable and/or writable stream.
 * @param callback A callback function that takes an optional error argument.
 * @return A cleanup function which removes all registered listeners.
 */
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options: FinishedOptions, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
function finished(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, callback: (err?: NodeJS.ErrnoException | null) => void): () => void;
// Promise-returning signature picked up by `util.promisify(stream.finished)`.
namespace finished {
    function __promisify__(stream: NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream, options?: FinishedOptions): Promise<void>;
}
|
// A zero-argument function producing the pipeline's source data.
type PipelineSourceFunction<T> = () => Iterable<T> | AsyncIterable<T>;
// Anything accepted as the first argument of `pipeline()`.
type PipelineSource<T> = Iterable<T> | AsyncIterable<T> | NodeJS.ReadableStream | PipelineSourceFunction<T>;
// A middle stage: either a duplex/transform stream, or a function mapping the
// upstream async iterable to a new async iterable of `U`.
type PipelineTransform<S extends PipelineTransformSource<any>, U> =
    | NodeJS.ReadWriteStream
    | ((source: S extends (...args: any[]) => Iterable<infer ST> | AsyncIterable<infer ST> ? AsyncIterable<ST> : S) => AsyncIterable<U>);
// Anything that can feed a transform stage (the original source or a prior transform).
type PipelineTransformSource<T> = PipelineSource<T> | PipelineTransform<any, T>;
// Function destinations: consume the upstream async iterable, optionally
// producing more values or a final promised result.
type PipelineDestinationIterableFunction<T> = (source: AsyncIterable<T>) => AsyncIterable<any>;
type PipelineDestinationPromiseFunction<T, P> = (source: AsyncIterable<T>) => Promise<P>;
// Anything accepted as the last stage, parameterized on the element type `ST`
// flowing out of the preceding stage.
type PipelineDestination<S extends PipelineTransformSource<any>, P> = S extends PipelineTransformSource<infer ST>
    ? NodeJS.WritableStream | PipelineDestinationIterableFunction<ST> | PipelineDestinationPromiseFunction<ST, P>
    : never;
// Callback shape: gains a `value` parameter when the destination resolves a promise.
type PipelineCallback<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P>
    ? (err: NodeJS.ErrnoException | null, value: P) => void
    : (err: NodeJS.ErrnoException | null) => void;
// Promise returned by the promisified pipeline: resolves with the destination's
// value when it is a promise-returning function, otherwise with void.
type PipelinePromise<S extends PipelineDestination<any, any>> = S extends PipelineDestinationPromiseFunction<any, infer P> ? Promise<P> : Promise<void>;
|
1028 | interface PipelineOptions {
|
1029 | signal: AbortSignal;
|
1030 | }
|
1031 | |
1032 |
|
1033 |
|
1034 |
|
1035 |
|
1036 |
|
1037 |
|
1038 |
|
1039 |
|
1040 |
|
1041 |
|
1042 |
|
1043 |
|
1044 |
|
1045 |
|
1046 |
|
1047 |
|
1048 |
|
1049 |
|
1050 |
|
1051 |
|
1052 |
|
1053 |
|
1054 |
|
1055 |
|
1056 |
|
1057 |
|
1058 |
|
1059 |
|
1060 |
|
1061 |
|
1062 |
|
1063 |
|
1064 |
|
1065 |
|
1066 |
|
1067 |
|
1068 |
|
1069 |
|
1070 |
|
1071 |
|
1072 |
|
1073 |
|
1074 |
|
1075 |
|
1076 |
|
1077 |
|
1078 |
|
1079 |
|
1080 |
|
1081 |
|
1082 |
|
1083 |
|
1084 |
|
1085 |
|
1086 |
|
1087 |
|
1088 |
|
1089 |
|
1090 |
|
1091 |
|
1092 |
|
1093 |
|
1094 |
|
1095 |
|
1096 |
|
1097 |
|
1098 |
|
1099 |
|
1100 |
|
1101 |
|
1102 |
|
1103 |
|
1104 |
|
1105 |
|
1106 |
|
1107 |
|
1108 |
|
1109 |
|
1110 |
|
1111 |
|
1112 |
|
1113 |
|
1114 |
|
1115 |
|
1116 |
|
1117 |
|
1118 |
|
1119 |
|
1120 |
|
1121 |
|
1122 |
|
1123 |
|
1124 |
|
1125 |
|
1126 |
|
1127 |
|
1128 |
|
1129 |
|
1130 |
|
1131 |
|
1132 |
|
1133 |
|
1134 |
|
1135 |
|
1136 |
|
1137 |
|
1138 |
|
1139 |
|
1140 |
|
1141 |
|
1142 |
|
1143 |
|
1144 |
|
1145 |
|
1146 |
|
1147 |
|
1148 |
|
1149 |
|
1150 |
|
1151 |
|
1152 |
|
1153 |
|
1154 |
|
1155 |
|
/**
 * A module method to pipe between streams and generators, forwarding errors and
 * properly cleaning up, and providing a callback when the pipeline is complete.
 * Overloads cover one to four typed transform stages; the array and rest-argument
 * forms fall back to untyped `NodeJS` stream interfaces.
 */
function pipeline<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(
    source: A,
    destination: B,
    callback?: PipelineCallback<B>
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
    source: A,
    transform1: T1,
    destination: B,
    callback?: PipelineCallback<B>
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
    source: A,
    transform1: T1,
    transform2: T2,
    destination: B,
    callback?: PipelineCallback<B>
): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<
    A extends PipelineSource<any>,
    T1 extends PipelineTransform<A, any>,
    T2 extends PipelineTransform<T1, any>,
    T3 extends PipelineTransform<T2, any>,
    B extends PipelineDestination<T3, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline<
    A extends PipelineSource<any>,
    T1 extends PipelineTransform<A, any>,
    T2 extends PipelineTransform<T1, any>,
    T3 extends PipelineTransform<T2, any>,
    T4 extends PipelineTransform<T3, any>,
    B extends PipelineDestination<T4, any>
>(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, callback?: PipelineCallback<B>): B extends NodeJS.WritableStream ? B : NodeJS.WritableStream;
function pipeline(
    streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>,
    callback?: (err: NodeJS.ErrnoException | null) => void
): NodeJS.WritableStream;
function pipeline(
    stream1: NodeJS.ReadableStream,
    stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
    ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | ((err: NodeJS.ErrnoException | null) => void)>
): NodeJS.WritableStream;
|
// Promise-returning signatures picked up by `util.promisify(stream.pipeline)`;
// they mirror the callback overloads above, swapping the callback for an
// optional `PipelineOptions` and returning `PipelinePromise<B>`.
namespace pipeline {
    function __promisify__<A extends PipelineSource<any>, B extends PipelineDestination<A, any>>(source: A, destination: B, options?: PipelineOptions): PipelinePromise<B>;
    function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, B extends PipelineDestination<T1, any>>(
        source: A,
        transform1: T1,
        destination: B,
        options?: PipelineOptions
    ): PipelinePromise<B>;
    function __promisify__<A extends PipelineSource<any>, T1 extends PipelineTransform<A, any>, T2 extends PipelineTransform<T1, any>, B extends PipelineDestination<T2, any>>(
        source: A,
        transform1: T1,
        transform2: T2,
        destination: B,
        options?: PipelineOptions
    ): PipelinePromise<B>;
    function __promisify__<
        A extends PipelineSource<any>,
        T1 extends PipelineTransform<A, any>,
        T2 extends PipelineTransform<T1, any>,
        T3 extends PipelineTransform<T2, any>,
        B extends PipelineDestination<T3, any>
    >(source: A, transform1: T1, transform2: T2, transform3: T3, destination: B, options?: PipelineOptions): PipelinePromise<B>;
    function __promisify__<
        A extends PipelineSource<any>,
        T1 extends PipelineTransform<A, any>,
        T2 extends PipelineTransform<T1, any>,
        T3 extends PipelineTransform<T2, any>,
        T4 extends PipelineTransform<T3, any>,
        B extends PipelineDestination<T4, any>
    >(source: A, transform1: T1, transform2: T2, transform3: T3, transform4: T4, destination: B, options?: PipelineOptions): PipelinePromise<B>;
    function __promisify__(streams: ReadonlyArray<NodeJS.ReadableStream | NodeJS.WritableStream | NodeJS.ReadWriteStream>, options?: PipelineOptions): Promise<void>;
    function __promisify__(
        stream1: NodeJS.ReadableStream,
        stream2: NodeJS.ReadWriteStream | NodeJS.WritableStream,
        ...streams: Array<NodeJS.ReadWriteStream | NodeJS.WritableStream | PipelineOptions>
    ): Promise<void>;
}
|
/**
 * Minimal handle interface with lifecycle and event-loop reference-counting
 * methods. (NOTE(review): presumably the libuv pipe handle used internally by
 * net sockets — confirm against consumers.)
 */
interface Pipe {
    close(): void;
    hasRef(): boolean;
    ref(): void;
    unref(): void;
}
// Re-exports of the `stream/promises` and `stream/consumers` submodules,
// available as `require('stream').promises` / `.consumers`.
const promises: typeof streamPromises;
const consumers: typeof streamConsumers;
|
1243 | }
|
1244 | export = internal;
|
1245 | }
|
// The 'node:'-prefixed specifier resolves to the same module as 'stream'.
declare module 'node:stream' {
    import stream = require('stream');
    export = stream;
}
|