"use strict"; var __create = Object.create; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __getProtoOf = Object.getPrototypeOf; var __hasOwnProp = Object.prototype.hasOwnProperty; var __name = (target, value) => __defProp(target, "name", { value, configurable: true }); var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( // If the importer is in node compatibility mode or this is not an ESM // file that has been converted to a CommonJS file using a Babel- // compatible transform (i.e. "__esModule" has not been set), then set // "default" to the CommonJS "module.exports" for node compatibility. isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, mod )); var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // src/index.ts var src_exports = {}; __export(src_exports, { BufferSlicer: () => BufferSlicer3, BufferSlicerReadStream: () => BufferSlicerReadStream, BufferSlicerWriteStream: () => BufferSlicerWriteStream, EInvalidUnref: () => EInvalidUnref, EReleasedTwice: () => EReleasedTwice, ETOOBigError: () => ETOOBigError, FdSlicer: () => FdSlicer3, Lock: () => Lock, ReadStream: () => ReadStream, WriteStream: () => WriteStream, createFromBuffer: () => createFromBuffer, createFromFd: () => createFromFd }); module.exports = __toCommonJS(src_exports); // src/buffer-slicer.ts var import_events = require("events"); // src/buffer-slicer-read-stream.ts var import_node_stream = require("stream"); var BufferSlicerReadStream = class extends import_node_stream.PassThrough { static { __name(this, "BufferSlicerReadStream"); } /** * See more {@link BufferSlicerCreateReadOptions.start}. */ start; /** * See more {@link BufferSlicerCreateReadOptions.end}. */ endOffset; /** * Exclusive upper bound offset into the file to stop reading from. * * Defaults to {@link BufferSlicer.buffer} length. */ pos; constructor(bufferSlicer, options) { options = options || {}; super(options); this.start = options.start || 0; this.endOffset = options.end; this.pos = this.endOffset || bufferSlicer.buffer.length; const entireSlice = bufferSlicer.buffer.slice(this.start, this.pos); let offset = 0; while (true) { const nextOffset = offset + bufferSlicer.maxChunkSize; if (nextOffset >= entireSlice.length) { if (offset < entireSlice.length) { this.write(entireSlice.slice(offset, entireSlice.length)); } break; } this.write(entireSlice.slice(offset, nextOffset)); offset = nextOffset; } this.end(); } }; // src/buffer-slicer-write-stream.ts var import_node_stream2 = require("stream"); // src/errors.ts var ETOOBigError = class extends Error { static { __name(this, "ETOOBigError"); } code; constructor() { super("maximum file length exceeded"); this.code = "ETOOBIG"; } }; var EInvalidUnref = class extends Error { static { __name(this, "EInvalidUnref"); } code; constructor() { super("cannot unref when refCount equal or lower than 0"); this.code = "EINVALIDUNREF"; } }; var EReleasedTwice = class extends Error { static { __name(this, "EReleasedTwice"); } code; constructor() { super("release callback called twice"); this.code = "ERELEASEDTWICE"; } }; // src/buffer-slicer-write-stream.ts var BufferSlicerWriteStream = class extends import_node_stream2.Writable { static { __name(this, "BufferSlicerWriteStream"); } bufferSlicer; start; endOffset; bytesWritten; pos; constructor(bufferSlicer, options) { options = options || {}; super(options); this.bufferSlicer = bufferSlicer; this.start = options.start || 0; this.endOffset = options.end == null ? this.bufferSlicer.buffer.length : +options.end; this.bytesWritten = 0; this.pos = this.start; } _write(buffer, _encoding, callback) { const end = this.pos + buffer.length; if (end > this.endOffset) { const err = new ETOOBigError(); callback(err); return; } buffer.copy(this.bufferSlicer.buffer, this.pos, 0, buffer.length); this.bytesWritten += buffer.length; this.pos = end; this.emit("progress"); callback(); } }; // src/buffer-slicer.ts var BufferSlicer3 = class extends import_events.EventEmitter { static { __name(this, "BufferSlicer"); } /** * The number of streams that are currently using this BufferSlicer. */ refCount; /** * The buffer that this BufferSlicer is reading/writing. */ buffer; /** * See more {@link BufferSliceOptions.maxChunkSize}. */ maxChunkSize; constructor(buffer, options) { super(); this.refCount = 0; this.buffer = buffer; this.maxChunkSize = options?.maxChunkSize || Number.MAX_SAFE_INTEGER; } /** * Read from the {@link BufferSlicer.buffer}. * * Equivalent to fs.read, but with concurrency protection. * * @param buffer The buffer to write to. * @param offset The offset to start writing at. * @param length The number of bytes to read. * @param position The offset from the beginning of the file to start reading from. * @param callback The callback that will be called when the read is completed. */ read(buffer, offset, length, position, callback) { const end = position + length; const delta = end - this.buffer.length; const written = delta > 0 ? delta : length; this.buffer.copy(buffer, offset, position, end); setImmediate(() => { callback(null, written); }); } /** * Write to the {@link BufferSlicer.buffer}. * * Equivalent to fs.write, but with concurrency protection. * * @param buffer The buffer to write to. * @param offset The offset to start writing at. * @param length The number of bytes to write. * @param position The offset from the beginning of the file to start writing at. * @param callback The callback that will be called when the write is completed. */ write(buffer, offset, length, position, callback) { buffer.copy(this.buffer, position, offset, offset + length); setImmediate(() => { callback(null, length, buffer); }); } /** * Create a readable stream of the buffer. * * If maxChunkSize was specified (see createFromBuffer()), the read stream will provide chunks of at most that size. * * Normally, the read stream provides the entire range requested in a single chunk, but this can cause performance problems in some circumstances. * * See thejoshwolfe/yauzl#87. * * @param options Options for the read stream */ createReadStream(options) { return new BufferSlicerReadStream(this, options); } /** * Create a writable stream of the buffer. * * @param options Options for the write stream. */ createWriteStream(options) { return new BufferSlicerWriteStream(this, options); } /** * Increase the {@link BufferSlicer.refCount} reference count by 1. */ ref() { this.refCount += 1; } /** * Decrease the {@link BufferSlicer.refCount} reference count by 1. */ unref() { this.refCount -= 1; if (this.refCount < 0) { throw new EInvalidUnref(); } } }; // src/fd-slicer.ts var import_events2 = require("events"); var import_node_fs3 = __toESM(require("fs"), 1); // src/lock.ts var Lock = class { static { __name(this, "Lock"); } /** * The error passed to the release callback, if any. * @private */ #error = null; /** * The queue of pending operations. * @private */ #waiting = []; /** * The queue of listeners waiting for all locks to be released. * @private */ #listeners = []; /** * The number of pending operations. * @private */ #pending = 0; /** * The maximum number of concurrent operations. * @private */ #max; /** * Create a new Pend instance. * * @param max The maximum number of concurrent operations. */ constructor(max = Infinity) { this.#max = max; } /** * The number of pending operations. */ get pending() { return this.#pending; } /** * If there is an error passed to the release callback, then this will be set to that error. */ get error() { return this.#error; } /** * Acquire a resource, if the max concurrency has been reached, then the fn is queued. * * @param fn The function to run. */ acquire(fn) { if (this.#pending < this.#max) { this.#run(fn); } else { this.#waiting.push(fn); } } /** * Wait for all pending operations to be released. * * If there are no pending operations, then the fn is called immediately. * * After pending is 0, the fn is called, and the listeners are cleared. * * @param fn The function to run. */ wait(fn) { if (this.#pending === 0) { setImmediate(() => { fn(null); }); return; } this.#listeners.push(fn); } /** * Run actual or pending fn. * * @fn The function to run. * @private */ #run(fn) { fn(this.#createRelease()); } /** * Create the release function for a pending operation. * * @private */ #createRelease() { this.#pending += 1; let called = false; return (err) => { if (called) throw new EReleasedTwice(); called = true; this.#error = this.#error || err || null; this.#pending -= 1; const nextCallback = this.#waiting.shift(); if (nextCallback && this.#pending < this.#max) { this.#run(nextCallback); } else if (this.#pending === 0) { const listeners = this.#listeners; this.#listeners = []; for (const listener of listeners) { listener(this.#error); } } }; } }; // src/read-stream.ts var import_node_fs = __toESM(require("fs"), 1); var import_node_stream3 = require("stream"); var ReadStream = class extends import_node_stream3.Readable { static { __name(this, "ReadStream"); } /** * See more {@link ReadStreamOptions.start}. */ start; /** * See more {@link ReadStreamOptions.end}. */ endOffset; /** * The current position of the stream in the file descriptor. * * Defaults to {@link ReadStream.start}. */ pos; context; constructor(context, options) { options = options || {}; super(options); this.context = context; this.context.ref(); this.start = options.start || 0; this.endOffset = options.end; this.pos = this.start; } /** * {@inheritDoc} */ _read(n) { if (this.destroyed) return; let toRead = n; if (this.endOffset != null) { toRead = Math.min(toRead, this.endOffset - this.pos); } if (toRead <= 0) { this.destroyed = true; this.push(null); this.context.unref(); return; } this.context.pend.acquire((cb) => { if (this.destroyed) return cb(); const buffer = Buffer.alloc(toRead); import_node_fs.default.read( this.context.fd, buffer, 0, toRead, this.pos, (err, bytesRead) => { if (err) { this.destroy(err); } else if (bytesRead === 0) { this.push(null); } else { this.pos += bytesRead; this.push(buffer.slice(0, bytesRead)); } cb(); } ); }); } /** * {@inheritDoc} */ _destroy(err, callback) { this.context.unref(); callback(err); } }; // src/write-stream.ts var import_node_fs2 = __toESM(require("fs"), 1); var import_node_stream4 = require("stream"); var WriteStream = class extends import_node_stream4.Writable { static { __name(this, "WriteStream"); } /** * See more {@link WriteStreamOptions.start}. */ start; /** * See more {@link WriteStreamOptions.end}. */ endOffset; /** * The number of bytes written so far. */ bytesWritten; /** * The current position of the stream in the file descriptor. * * Defaults to {@link WriteStream.start}. */ pos; context; constructor(context, options) { options = options || {}; super(options); this.context = context; this.context.ref(); this.start = options.start || 0; this.endOffset = options.end == null ? Infinity : +options.end; this.bytesWritten = 0; this.pos = this.start; } /** * {@inheritDoc} */ _write(buffer, _encoding, callback) { if (this.pos + buffer.length > this.endOffset) { const err = new ETOOBigError(); callback(err); return; } this.context.pend.acquire((cb) => { if (this.destroyed) return cb(); import_node_fs2.default.write( this.context.fd, buffer, 0, buffer.length, this.pos, (err, bytes) => { if (err) { cb(); callback(err); } else { this.bytesWritten += bytes; this.pos += bytes; this.emit("progress"); cb(); callback(); } } ); }); } /** * {@inheritDoc} */ _destroy(err, callback) { this.context.unref(); callback(err); } }; // src/fd-slicer.ts var FdSlicer3 = class extends import_events2.EventEmitter { static { __name(this, "FdSlicer"); } /** * The file descriptor that this FdSlicer is reading/writing. */ fd; /** * The number of streams that are currently using this FdSlicer. */ refCount; /** * See more {@link FdSlicerOptions.autoClose}. */ autoClose; /** * The concurrency protection. */ pend; constructor(fd, options) { super(); options = options || {}; this.fd = fd; this.pend = new Lock(1); this.refCount = 0; this.autoClose = !!options.autoClose; } /** * Read from the file descriptor. * * Equivalent to fs.read, but with concurrency protection. * * @param buffer The buffer to write to. * @param offset The offset to start writing at. * @param length The number of bytes to read. * @param position The offset from the beginning of the file to start reading from. * @param callback The callback that will be called when the read is completed. */ read(buffer, offset, length, position, callback) { this.pend.acquire((cb) => { import_node_fs3.default.read( this.fd, buffer, offset, length, position, (err, bytesRead, buffer2) => { cb(); callback(err, bytesRead, buffer2); } ); }); } /** * Write to the file descriptor. * * Equivalent to fs.write, but with concurrency protection. * * @param buffer The buffer to write to. * @param offset The offset to start writing at. * @param length The number of bytes to write. * @param position The offset from the beginning of the file to start writing at. * @param callback The callback that will be called when the write is completed. */ write(buffer, offset, length, position, callback) { this.pend.acquire((cb) => { import_node_fs3.default.write( this.fd, buffer, offset, length, position, (err, bytesWritten, buffer2) => { cb(); callback(err, bytesWritten, buffer2); } ); }); } /** * Create a readable stream of the file descriptor. * * @param options Options for the stream. */ createReadStream(options) { return new ReadStream(this, options); } /** * Create a writable stream of the file descriptor. * * @param options Options for the stream. */ createWriteStream(options) { return new WriteStream(this, options); } /** * Increase the {@link FdSlicer.refCount} reference count by 1. * * See more {@link FdSlicerOptions.autoClose}. */ ref() { this.refCount += 1; } /** * Decrease the {@link FdSlicer.refCount} reference count by 1. * * See more {@link FdSlicerOptions.autoClose}. */ unref() { this.refCount -= 1; if (this.refCount > 0) return; if (this.refCount < 0) throw new EInvalidUnref(); if (this.autoClose) { import_node_fs3.default.close(this.fd, (err) => { if (err) { this.emit("error", err); } else { this.emit("close"); } }); } } }; // src/create.ts function createFromBuffer(buffer, options) { return new BufferSlicer3(buffer, options); } __name(createFromBuffer, "createFromBuffer"); function createFromFd(fd, options) { return new FdSlicer3(fd, options); } __name(createFromFd, "createFromFd"); // Annotate the CommonJS export names for ESM import in node: 0 && (module.exports = { BufferSlicer, BufferSlicerReadStream, BufferSlicerWriteStream, EInvalidUnref, EReleasedTwice, ETOOBigError, FdSlicer, Lock, ReadStream, WriteStream, createFromBuffer, createFromFd }); //# sourceMappingURL=index.cjs.map