// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const util = require('node:util');
const stream = require('node:stream');
const is = require('./is');

require('./sharp');

// Use NODE_DEBUG=sharp to enable libvips warnings
const debuglog = util.debuglog('sharp');

/**
 * Constructor factory to create an instance of `sharp`, to which further methods are chained.
 *
 * JPEG, PNG, WebP, GIF, AVIF or TIFF format image data can be streamed out from this object.
 * When using Stream based output, derived attributes are available from the `info` event.
 *
 * Non-critical problems encountered during processing are emitted as `warning` events.
 *
 * Implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_stream_duplex) class.
 *
 * When loading more than one page/frame of an animated image,
 * these are combined as a vertically-stacked "toilet roll" image
 * where the overall height is the `pageHeight` multiplied by the number of `pages`.
 *
 * @constructs Sharp
 *
 * @emits Sharp#info
 * @emits Sharp#warning
 *
 * @example
 * sharp('input.jpg')
 *   .resize(300, 200)
 *   .toFile('output.jpg', function(err) {
 *     // output.jpg is a 300 pixels wide and 200 pixels high image
 *     // containing a scaled and cropped version of input.jpg
 *   });
 *
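 * @example
 * // A minimal sketch: subscribe to the non-critical 'warning' events described above;
 * // 'input.jpg' is a placeholder path
 * sharp('input.jpg')
 *   .on('warning', (message) => { console.warn(`sharp warning: ${message}`); })
 *   .resize(300)
 *   .toBuffer()
 *   .then(data => { console.log(`Output contains ${data.length} bytes`); });
 *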
 * @example
 * // Read image data from remote URL,
 * // resize to 300 pixels wide,
 * // emit an 'info' event with calculated dimensions
 * // and finally write image data to writableStream
 * const { body } = await fetch('https://...');
 * const readableStream = Readable.fromWeb(body);
 * const transformer = sharp()
 *   .resize(300)
 *   .on('info', ({ height }) => {
 *     console.log(`Image height is ${height}`);
 *   });
 * readableStream.pipe(transformer).pipe(writableStream);
 *
 * @example
 * // Create a blank 300x200 PNG image of semi-translucent red pixels
 * sharp({
 *   create: {
 *     width: 300,
 *     height: 200,
 *     channels: 4,
 *     background: { r: 255, g: 0, b: 0, alpha: 0.5 }
 *   }
 * })
 *   .png()
 *   .toBuffer()
 *   .then( ... );
 *
 * @example
 * // Convert an animated GIF to an animated WebP
 * await sharp('in.gif', { animated: true }).toFile('out.webp');
 *
 * @example
 * // Read a raw array of pixels and save it to a png
 * const input = Uint8Array.from([255, 255, 255, 0, 0, 0]); // or Uint8ClampedArray
 * const image = sharp(input, {
 *   // because the input does not contain its dimensions or how many channels it has
 *   // we need to specify it in the constructor options
 *   raw: {
 *     width: 2,
 *     height: 1,
 *     channels: 3
 *   }
 * });
 * await image.toFile('my-two-pixels.png');
 *
 * @example
 * // Generate RGB Gaussian noise
 * await sharp({
 *   create: {
 *     width: 300,
 *     height: 200,
 *     channels: 3,
 *     noise: {
 *       type: 'gaussian',
 *       mean: 128,
 *       sigma: 30
 *     }
 *   }
 * }).toFile('noise.png');
 *
 * @example
 * // Generate an image from text
 * await sharp({
 *   text: {
 *     text: 'Hello, world!',
 *     width: 400, // max width
 *     height: 300 // max height
 *   }
 * }).toFile('text_bw.png');
 *
 * @example
 * // Generate an rgba image from text using pango markup and font
 * await sharp({
 *   text: {
 *     text: '<span foreground="red">Red!</span><span background="cyan">blue</span>',
 *     font: 'sans',
 *     rgba: true,
 *     dpi: 300
 *   }
 * }).toFile('text_rgba.png');
 *
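 * @example
 * // A minimal sketch using the constructor options documented below: rasterise a
 * // vector input at a higher DPI and relax the input safety checks;
 * // 'input.svg' and 'output.png' are placeholder paths
 * await sharp('input.svg', {
 *   density: 300,
 *   limitInputPixels: false,
 *   failOn: 'none'
 * }).toFile('output.png');
 *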
 * @param {(Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray|Int8Array|Uint16Array|Int16Array|Uint32Array|Int32Array|Float32Array|Float64Array|string)} [input] - if present, can be
 *  a Buffer / ArrayBuffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or
 *  a TypedArray containing raw pixel image data, or
 *  a String containing the filesystem path to a JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file.
 *  JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
 * @param {Object} [options] - if present, is an Object with optional attributes.
 * @param {string} [options.failOn='warning'] - When to abort processing of invalid pixel data, one of (in order of sensitivity, least to most): 'none', 'truncated', 'error', 'warning'. Higher levels imply lower levels. Invalid metadata will always abort.
 * @param {number|boolean} [options.limitInputPixels=268402689] - Do not process input images where the number of pixels
 *  (width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
 *  An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF).
 * @param {boolean} [options.unlimited=false] - Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF).
 * @param {boolean} [options.sequentialRead=true] - Set this to `false` to use random access rather than sequential read. Some operations will do this automatically.
 * @param {number} [options.density=72] - number representing the DPI for vector images in the range 1 to 100000.
 * @param {boolean} [options.ignoreIcc=false] - should the embedded ICC profile, if any, be ignored.
 * @param {number} [options.pages=1] - Number of pages to extract for multi-page input (GIF, WebP, TIFF), use -1 for all pages.
 * @param {number} [options.page=0] - Page number to start extracting from for multi-page input (GIF, WebP, TIFF), zero based.
 * @param {number} [options.subifd=-1] - subIFD (Sub Image File Directory) to extract for OME-TIFF, defaults to main image.
 * @param {number} [options.level=0] - level to extract from a multi-level input (OpenSlide), zero based.
 * @param {boolean} [options.animated=false] - Set to `true` to read all frames/pages of an animated image (GIF, WebP, TIFF), equivalent of setting `pages` to `-1`.
 * @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
 * @param {number} [options.raw.width] - integral number of pixels wide.
 * @param {number} [options.raw.height] - integral number of pixels high.
 * @param {number} [options.raw.channels] - integral number of channels, between 1 and 4.
 * @param {boolean} [options.raw.premultiplied] - specifies that the raw input has already been premultiplied, set to `true`
 *  to avoid sharp premultiplying the image. (optional, default `false`)
 * @param {Object} [options.create] - describes a new image to be created.
 * @param {number} [options.create.width] - integral number of pixels wide.
 * @param {number} [options.create.height] - integral number of pixels high.
 * @param {number} [options.create.channels] - integral number of channels, either 3 (RGB) or 4 (RGBA).
 * @param {string|Object} [options.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
 * @param {Object} [options.create.noise] - describes a noise to be created.
 * @param {string} [options.create.noise.type] - type of generated noise, currently only `gaussian` is supported.
 * @param {number} [options.create.noise.mean] - mean of pixels in generated noise.
 * @param {number} [options.create.noise.sigma] - standard deviation of pixels in generated noise.
 * @param {Object} [options.text] - describes a new text image to be created.
 * @param {string} [options.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
 * @param {string} [options.text.font] - font name to render with.
 * @param {string} [options.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
 * @param {number} [options.text.width=0] - Integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
 * @param {number} [options.text.height=0] - Maximum integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
 * @param {string} [options.text.align='left'] - Alignment style for multi-line text (`'left'`, `'centre'`, `'center'`, `'right'`).
 * @param {boolean} [options.text.justify=false] - set this to true to apply justification to the text.
 * @param {number} [options.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
 * @param {boolean} [options.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`.
 * @param {number} [options.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
 * @param {string} [options.text.wrap='word'] - word wrapping style when width is provided, one of: 'word', 'char', 'word-char' (prefer word, fallback to char) or 'none'.
 * @returns {Sharp}
 * @throws {Error} Invalid parameters
 */
const Sharp = function (input, options) {
  if (arguments.length === 1 && !is.defined(input)) {
    throw new Error('Invalid input');
  }
  if (!(this instanceof Sharp)) {
    return new Sharp(input, options);
  }
  stream.Duplex.call(this);
  this.options = {
    // resize options
    topOffsetPre: -1,
    leftOffsetPre: -1,
    widthPre: -1,
    heightPre: -1,
    topOffsetPost: -1,
    leftOffsetPost: -1,
    widthPost: -1,
    heightPost: -1,
    width: -1,
    height: -1,
    canvas: 'crop',
    position: 0,
    resizeBackground: [0, 0, 0, 255],
    useExifOrientation: false,
    angle: 0,
    rotationAngle: 0,
    rotationBackground: [0, 0, 0, 255],
    rotateBeforePreExtract: false,
    flip: false,
    flop: false,
    extendTop: 0,
    extendBottom: 0,
    extendLeft: 0,
    extendRight: 0,
    extendBackground: [0, 0, 0, 255],
    extendWith: 'background',
    withoutEnlargement: false,
    withoutReduction: false,
    affineMatrix: [],
    affineBackground: [0, 0, 0, 255],
    affineIdx: 0,
    affineIdy: 0,
    affineOdx: 0,
    affineOdy: 0,
    affineInterpolator: this.constructor.interpolators.bilinear,
    kernel: 'lanczos3',
    fastShrinkOnLoad: true,
    // operations
    tint: [-1, 0, 0, 0],
    flatten: false,
    flattenBackground: [0, 0, 0],
    unflatten: false,
    negate: false,
    negateAlpha: true,
    medianSize: 0,
    blurSigma: 0,
    precision: 'integer',
    minAmpl: 0.2,
    sharpenSigma: 0,
    sharpenM1: 1,
    sharpenM2: 2,
    sharpenX1: 2,
    sharpenY2: 10,
    sharpenY3: 20,
    threshold: 0,
    thresholdGrayscale: true,
    trimBackground: [],
    trimThreshold: -1,
    trimLineArt: false,
    gamma: 0,
    gammaOut: 0,
    greyscale: false,
    normalise: false,
    normaliseLower: 1,
    normaliseUpper: 99,
    claheWidth: 0,
    claheHeight: 0,
    claheMaxSlope: 3,
    brightness: 1,
    saturation: 1,
    hue: 0,
    lightness: 0,
    booleanBufferIn: null,
    booleanFileIn: '',
    joinChannelIn: [],
    extractChannel: -1,
    removeAlpha: false,
    ensureAlpha: -1,
    colourspace: 'srgb',
    colourspacePipeline: 'last',
    composite: [],
    // output
    fileOut: '',
    formatOut: 'input',
    streamOut: false,
    keepMetadata: 0,
    withMetadataOrientation: -1,
    withMetadataDensity: 0,
    withIccProfile: '',
    withExif: {},
    withExifMerge: true,
    resolveWithObject: false,
    // output format
    jpegQuality: 80,
    jpegProgressive: false,
    jpegChromaSubsampling: '4:2:0',
    jpegTrellisQuantisation: false,
    jpegOvershootDeringing: false,
    jpegOptimiseScans: false,
    jpegOptimiseCoding: true,
    jpegQuantisationTable: 0,
    pngProgressive: false,
    pngCompressionLevel: 6,
    pngAdaptiveFiltering: false,
    pngPalette: false,
    pngQuality: 100,
    pngEffort: 7,
    pngBitdepth: 8,
    pngDither: 1,
    jp2Quality: 80,
    jp2TileHeight: 512,
    jp2TileWidth: 512,
    jp2Lossless: false,
    jp2ChromaSubsampling: '4:4:4',
    webpQuality: 80,
    webpAlphaQuality: 100,
    webpLossless: false,
    webpNearLossless: false,
    webpSmartSubsample: false,
    webpPreset: 'default',
    webpEffort: 4,
    webpMinSize: false,
    webpMixed: false,
    gifBitdepth: 8,
    gifEffort: 7,
    gifDither: 1,
    gifInterFrameMaxError: 0,
    gifInterPaletteMaxError: 3,
    gifReuse: true,
    gifProgressive: false,
    tiffQuality: 80,
    tiffCompression: 'jpeg',
    tiffPredictor: 'horizontal',
    tiffPyramid: false,
    tiffMiniswhite: false,
    tiffBitdepth: 8,
    tiffTile: false,
    tiffTileHeight: 256,
    tiffTileWidth: 256,
    tiffXres: 1.0,
    tiffYres: 1.0,
    tiffResolutionUnit: 'inch',
    heifQuality: 50,
    heifLossless: false,
    heifCompression: 'av1',
    heifEffort: 4,
    heifChromaSubsampling: '4:4:4',
    heifBitdepth: 8,
    jxlDistance: 1,
    jxlDecodingTier: 0,
    jxlEffort: 7,
    jxlLossless: false,
    rawDepth: 'uchar',
    tileSize: 256,
    tileOverlap: 0,
    tileContainer: 'fs',
    tileLayout: 'dz',
    tileFormat: 'last',
    tileDepth: 'last',
    tileAngle: 0,
    tileSkipBlanks: -1,
    tileBackground: [255, 255, 255, 255],
    tileCentre: false,
    tileId: 'https://example.com/iiif',
    tileBasename: '',
    timeoutSeconds: 0,
    linearA: [],
    linearB: [],
    // Function to notify of libvips warnings
    debuglog: warning => {
      this.emit('warning', warning);
      debuglog(warning);
    },
    // Function to notify of queue length changes
    queueListener: function (queueLength) {
      Sharp.queue.emit('change', queueLength);
    }
  };
  this.options.input = this._createInputDescriptor(input, options, { allowStream: true });
  return this;
};
Object.setPrototypeOf(Sharp.prototype, stream.Duplex.prototype);
Object.setPrototypeOf(Sharp, stream.Duplex);

/**
 * Take a "snapshot" of the Sharp instance, returning a new instance.
 * Cloned instances inherit the input of their parent instance.
 * This allows multiple output Streams and therefore multiple processing pipelines to share a single input Stream.
 *
 * @example
 * const pipeline = sharp().rotate();
 * pipeline.clone().resize(800, 600).pipe(firstWritableStream);
 * pipeline.clone().extract({ left: 20, top: 20, width: 100, height: 100 }).pipe(secondWritableStream);
 * readableStream.pipe(pipeline);
 * // firstWritableStream receives auto-rotated, resized readableStream
 * // secondWritableStream receives auto-rotated, extracted region of readableStream
 *
 * @example
 * // Create a pipeline that will download an image, resize it and format it to different files
 * // Using Promises to know when the pipeline is complete
 * const fs = require("fs");
 * const got = require("got");
 * const sharpStream = sharp({ failOn: 'none' });
 *
 * const promises = [];
 *
 * promises.push(
 *   sharpStream
 *     .clone()
 *     .jpeg({ quality: 100 })
 *     .toFile("originalFile.jpg")
 * );
 *
 * promises.push(
 *   sharpStream
 *     .clone()
 *     .resize({ width: 500 })
 *     .jpeg({ quality: 80 })
 *     .toFile("optimized-500.jpg")
 * );
 *
 * promises.push(
 *   sharpStream
 *     .clone()
 *     .resize({ width: 500 })
 *     .webp({ quality: 80 })
 *     .toFile("optimized-500.webp")
 * );
 *
 * // https://github.com/sindresorhus/got/blob/main/documentation/3-streams.md
 * got.stream("https://www.example.com/some-file.jpg").pipe(sharpStream);
 *
 * Promise.all(promises)
 *   .then(res => { console.log("Done!", res); })
 *   .catch(err => {
 *     console.error("Error processing files, let's clean it up", err);
 *     try {
 *       fs.unlinkSync("originalFile.jpg");
 *       fs.unlinkSync("optimized-500.jpg");
 *       fs.unlinkSync("optimized-500.webp");
 *     } catch (e) {}
 *   });
 *
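 * @example
 * // A minimal sketch: share one Stream-based input between two Buffer outputs;
 * // readableStream is assumed to be an existing readable image stream
 * const pipeline = sharp();
 * const jpegPromise = pipeline.clone().jpeg().toBuffer();
 * const webpPromise = pipeline.clone().webp().toBuffer();
 * readableStream.pipe(pipeline);
 * const [jpegBuffer, webpBuffer] = await Promise.all([jpegPromise, webpPromise]);
 *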
 * @returns {Sharp}
 */
function clone () {
  // Clone existing options
  const clone = this.constructor.call();
  const { debuglog, queueListener, ...options } = this.options;
  clone.options = structuredClone(options);
  clone.options.debuglog = debuglog;
  clone.options.queueListener = queueListener;
  // Pass 'finish' event to clone for Stream-based input
  if (this._isStreamInput()) {
    this.on('finish', () => {
      // Clone inherits input data
      this._flattenBufferIn();
      clone.options.input.buffer = this.options.input.buffer;
      clone.emit('finish');
    });
  }
  return clone;
}
Object.assign(Sharp.prototype, { clone });

/**
 * Export constructor.
 * @private
 */
module.exports = Sharp;