1 | // Copyright 2013 Lovell Fuller and others.
|
2 | // SPDX-License-Identifier: Apache-2.0
|
3 |
|
4 | ;
|
5 |
|
const util = require('util');
const stream = require('stream');
const is = require('./is');

// Check for a vendored libvips (return value unused here), then load the
// native sharp binding for its module-load side effects.
require('./libvips').hasVendoredLibvips();
require('./sharp');

// Use NODE_DEBUG=sharp to enable libvips warnings
const debuglog = util.debuglog('sharp');
|
15 |
|
16 | /**
|
17 | * Constructor factory to create an instance of `sharp`, to which further methods are chained.
|
18 | *
|
19 | * JPEG, PNG, WebP, GIF, AVIF or TIFF format image data can be streamed out from this object.
|
20 | * When using Stream based output, derived attributes are available from the `info` event.
|
21 | *
|
22 | * Non-critical problems encountered during processing are emitted as `warning` events.
|
23 | *
|
24 | * Implements the [stream.Duplex](http://nodejs.org/api/stream.html#stream_class_stream_duplex) class.
|
25 | *
|
26 | * @constructs Sharp
|
27 | *
|
28 | * @emits Sharp#info
|
29 | * @emits Sharp#warning
|
30 | *
|
31 | * @example
|
32 | * sharp('input.jpg')
|
33 | * .resize(300, 200)
|
34 | * .toFile('output.jpg', function(err) {
|
35 | * // output.jpg is a 300 pixels wide and 200 pixels high image
|
36 | * // containing a scaled and cropped version of input.jpg
|
37 | * });
|
38 | *
|
39 | * @example
|
40 | * // Read image data from readableStream,
|
41 | * // resize to 300 pixels wide,
|
42 | * // emit an 'info' event with calculated dimensions
|
43 | * // and finally write image data to writableStream
|
44 | * var transformer = sharp()
|
45 | * .resize(300)
|
46 | * .on('info', function(info) {
|
47 | * console.log('Image height is ' + info.height);
|
48 | * });
|
49 | * readableStream.pipe(transformer).pipe(writableStream);
|
50 | *
|
51 | * @example
|
52 | * // Create a blank 300x200 PNG image of semi-translucent red pixels
|
53 | * sharp({
|
54 | * create: {
|
55 | * width: 300,
|
56 | * height: 200,
|
57 | * channels: 4,
|
58 | * background: { r: 255, g: 0, b: 0, alpha: 0.5 }
|
59 | * }
|
60 | * })
|
61 | * .png()
|
62 | * .toBuffer()
|
63 | * .then( ... );
|
64 | *
|
65 | * @example
|
66 | * // Convert an animated GIF to an animated WebP
|
67 | * await sharp('in.gif', { animated: true }).toFile('out.webp');
|
68 | *
|
69 | * @example
|
70 | * // Read a raw array of pixels and save it to a png
|
71 | * const input = Uint8Array.from([255, 255, 255, 0, 0, 0]); // or Uint8ClampedArray
|
72 | * const image = sharp(input, {
|
73 | * // because the input does not contain its dimensions or how many channels it has
|
74 | * // we need to specify it in the constructor options
|
75 | * raw: {
|
76 | * width: 2,
|
77 | * height: 1,
|
78 | * channels: 3
|
79 | * }
|
80 | * });
|
81 | * await image.toFile('my-two-pixels.png');
|
82 | *
|
83 | * @example
|
84 | * // Generate RGB Gaussian noise
|
85 | * await sharp({
|
86 | * create: {
|
87 | * width: 300,
|
88 | * height: 200,
|
89 | * channels: 3,
|
90 | * noise: {
|
91 | * type: 'gaussian',
|
92 | * mean: 128,
|
93 | * sigma: 30
|
94 | * }
|
95 | * }
|
96 | * }).toFile('noise.png');
|
97 | *
|
98 | * @example
|
99 | * // Generate an image from text
|
100 | * await sharp({
|
101 | * text: {
|
102 | * text: 'Hello, world!',
|
103 | * width: 400, // max width
|
104 | * height: 300 // max height
|
105 | * }
|
106 | * }).toFile('text_bw.png');
|
107 | *
|
108 | * @example
|
109 | * // Generate an rgba image from text using pango markup and font
|
110 | * await sharp({
|
111 | * text: {
|
112 | * text: '<span foreground="red">Red!</span><span background="cyan">blue</span>',
|
113 | * font: 'sans',
|
114 | * rgba: true,
|
115 | * dpi: 300
|
116 | * }
|
117 | * }).toFile('text_rgba.png');
|
118 | *
|
119 | * @param {(Buffer|ArrayBuffer|Uint8Array|Uint8ClampedArray|Int8Array|Uint16Array|Int16Array|Uint32Array|Int32Array|Float32Array|Float64Array|string)} [input] - if present, can be
|
120 | * a Buffer / ArrayBuffer / Uint8Array / Uint8ClampedArray containing JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image data, or
|
121 | * a TypedArray containing raw pixel image data, or
|
122 | * a String containing the filesystem path to a JPEG, PNG, WebP, AVIF, GIF, SVG or TIFF image file.
|
123 | * JPEG, PNG, WebP, AVIF, GIF, SVG, TIFF or raw pixel image data can be streamed into the object when not present.
|
124 | * @param {Object} [options] - if present, is an Object with optional attributes.
|
125 | * @param {string} [options.failOn='warning'] - when to abort processing of invalid pixel data, one of (in order of sensitivity): 'none' (least), 'truncated', 'error' or 'warning' (most), higher levels imply lower levels, invalid metadata will always abort.
|
126 | * @param {number|boolean} [options.limitInputPixels=268402689] - Do not process input images where the number of pixels
|
127 | * (width x height) exceeds this limit. Assumes image dimensions contained in the input metadata can be trusted.
|
128 | * An integral Number of pixels, zero or false to remove limit, true to use default limit of 268402689 (0x3FFF x 0x3FFF).
|
129 | * @param {boolean} [options.unlimited=false] - Set this to `true` to remove safety features that help prevent memory exhaustion (JPEG, PNG, SVG, HEIF).
|
130 | * @param {boolean} [options.sequentialRead=true] - Set this to `false` to use random access rather than sequential read. Some operations will do this automatically.
|
131 | * @param {number} [options.density=72] - number representing the DPI for vector images in the range 1 to 100000.
|
132 | * @param {boolean} [options.ignoreIcc=false] - should the embedded ICC profile, if any, be ignored.
|
133 | * @param {number} [options.pages=1] - Number of pages to extract for multi-page input (GIF, WebP, TIFF), use -1 for all pages.
|
134 | * @param {number} [options.page=0] - Page number to start extracting from for multi-page input (GIF, WebP, TIFF), zero based.
|
135 | * @param {number} [options.subifd=-1] - subIFD (Sub Image File Directory) to extract for OME-TIFF, defaults to main image.
|
136 | * @param {number} [options.level=0] - level to extract from a multi-level input (OpenSlide), zero based.
|
137 | * @param {boolean} [options.animated=false] - Set to `true` to read all frames/pages of an animated image (GIF, WebP, TIFF), equivalent of setting `pages` to `-1`.
|
138 | * @param {Object} [options.raw] - describes raw pixel input image data. See `raw()` for pixel ordering.
|
139 | * @param {number} [options.raw.width] - integral number of pixels wide.
|
140 | * @param {number} [options.raw.height] - integral number of pixels high.
|
141 | * @param {number} [options.raw.channels] - integral number of channels, between 1 and 4.
|
142 | * @param {boolean} [options.raw.premultiplied] - specifies that the raw input has already been premultiplied, set to `true`
|
143 | * to avoid sharp premultiplying the image. (optional, default `false`)
|
144 | * @param {Object} [options.create] - describes a new image to be created.
|
145 | * @param {number} [options.create.width] - integral number of pixels wide.
|
146 | * @param {number} [options.create.height] - integral number of pixels high.
|
147 | * @param {number} [options.create.channels] - integral number of channels, either 3 (RGB) or 4 (RGBA).
|
148 | * @param {string|Object} [options.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
|
149 | * @param {Object} [options.create.noise] - describes a noise to be created.
|
150 | * @param {string} [options.create.noise.type] - type of generated noise, currently only `gaussian` is supported.
|
151 | * @param {number} [options.create.noise.mean] - mean of pixels in generated noise.
|
152 | * @param {number} [options.create.noise.sigma] - standard deviation of pixels in generated noise.
|
153 | * @param {Object} [options.text] - describes a new text image to be created.
|
154 | * @param {string} [options.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
|
155 | * @param {string} [options.text.font] - font name to render with.
|
156 | * @param {string} [options.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
|
157 | * @param {number} [options.text.width=0] - Integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
|
158 | * @param {number} [options.text.height=0] - Maximum integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
|
159 | * @param {string} [options.text.align='left'] - Alignment style for multi-line text (`'left'`, `'centre'`, `'center'`, `'right'`).
|
160 | * @param {boolean} [options.text.justify=false] - set this to true to apply justification to the text.
|
161 | * @param {number} [options.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
|
162 | * @param {boolean} [options.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for pango markup features like `<span foreground="red">Red!</span>`.
|
163 | * @param {number} [options.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
|
164 | * @param {string} [options.text.wrap='word'] - word wrapping style when width is provided, one of: 'word', 'char', 'charWord' (prefer char, fallback to word) or 'none'.
|
165 | * @returns {Sharp}
|
166 | * @throws {Error} Invalid parameters
|
167 | */
|
const Sharp = function (input, options) {
  // A single explicitly-undefined argument is a programming error, as opposed
  // to calling with no arguments at all (Stream-based input).
  if (arguments.length === 1 && !is.defined(input)) {
    throw new Error('Invalid input');
  }
  // Support invocation without `new`
  if (!(this instanceof Sharp)) {
    return new Sharp(input, options);
  }
  stream.Duplex.call(this);
  // Default processing pipeline options; -1 is used for numeric "unset" values
  this.options = {
    // resize options
    topOffsetPre: -1,
    leftOffsetPre: -1,
    widthPre: -1,
    heightPre: -1,
    topOffsetPost: -1,
    leftOffsetPost: -1,
    widthPost: -1,
    heightPost: -1,
    width: -1,
    height: -1,
    canvas: 'crop',
    position: 0,
    resizeBackground: [0, 0, 0, 255],
    useExifOrientation: false,
    angle: 0,
    rotationAngle: 0,
    rotationBackground: [0, 0, 0, 255],
    rotateBeforePreExtract: false,
    flip: false,
    flop: false,
    extendTop: 0,
    extendBottom: 0,
    extendLeft: 0,
    extendRight: 0,
    extendBackground: [0, 0, 0, 255],
    extendWith: 'background',
    withoutEnlargement: false,
    withoutReduction: false,
    affineMatrix: [],
    affineBackground: [0, 0, 0, 255],
    affineIdx: 0,
    affineIdy: 0,
    affineOdx: 0,
    affineOdy: 0,
    affineInterpolator: this.constructor.interpolators.bilinear,
    kernel: 'lanczos3',
    fastShrinkOnLoad: true,
    // operations
    tintA: 128,
    tintB: 128,
    flatten: false,
    flattenBackground: [0, 0, 0],
    unflatten: false,
    negate: false,
    negateAlpha: true,
    medianSize: 0,
    blurSigma: 0,
    sharpenSigma: 0,
    sharpenM1: 1,
    sharpenM2: 2,
    sharpenX1: 2,
    sharpenY2: 10,
    sharpenY3: 20,
    threshold: 0,
    thresholdGrayscale: true,
    trimBackground: [],
    trimThreshold: 0,
    gamma: 0,
    gammaOut: 0,
    greyscale: false,
    normalise: false,
    normaliseLower: 1,
    normaliseUpper: 99,
    claheWidth: 0,
    claheHeight: 0,
    claheMaxSlope: 3,
    brightness: 1,
    saturation: 1,
    hue: 0,
    lightness: 0,
    booleanBufferIn: null,
    booleanFileIn: '',
    joinChannelIn: [],
    extractChannel: -1,
    removeAlpha: false,
    ensureAlpha: -1,
    colourspace: 'srgb',
    colourspaceInput: 'last',
    composite: [],
    // output
    fileOut: '',
    formatOut: 'input',
    streamOut: false,
    withMetadata: false,
    withMetadataOrientation: -1,
    withMetadataDensity: 0,
    withMetadataIcc: '',
    withMetadataStrs: {},
    resolveWithObject: false,
    // output format
    jpegQuality: 80,
    jpegProgressive: false,
    jpegChromaSubsampling: '4:2:0',
    jpegTrellisQuantisation: false,
    jpegOvershootDeringing: false,
    jpegOptimiseScans: false,
    jpegOptimiseCoding: true,
    jpegQuantisationTable: 0,
    pngProgressive: false,
    pngCompressionLevel: 6,
    pngAdaptiveFiltering: false,
    pngPalette: false,
    pngQuality: 100,
    pngEffort: 7,
    pngBitdepth: 8,
    pngDither: 1,
    jp2Quality: 80,
    jp2TileHeight: 512,
    jp2TileWidth: 512,
    jp2Lossless: false,
    jp2ChromaSubsampling: '4:4:4',
    webpQuality: 80,
    webpAlphaQuality: 100,
    webpLossless: false,
    webpNearLossless: false,
    webpSmartSubsample: false,
    webpEffort: 4,
    webpMinSize: false,
    webpMixed: false,
    gifBitdepth: 8,
    gifEffort: 7,
    gifDither: 1,
    gifInterFrameMaxError: 0,
    gifInterPaletteMaxError: 3,
    gifReuse: true,
    gifProgressive: false,
    tiffQuality: 80,
    tiffCompression: 'jpeg',
    tiffPredictor: 'horizontal',
    tiffPyramid: false,
    tiffBitdepth: 8,
    tiffTile: false,
    tiffTileHeight: 256,
    tiffTileWidth: 256,
    tiffXres: 1.0,
    tiffYres: 1.0,
    tiffResolutionUnit: 'inch',
    heifQuality: 50,
    heifLossless: false,
    heifCompression: 'av1',
    heifEffort: 4,
    heifChromaSubsampling: '4:4:4',
    jxlDistance: 1,
    jxlDecodingTier: 0,
    jxlEffort: 7,
    jxlLossless: false,
    rawDepth: 'uchar',
    tileSize: 256,
    tileOverlap: 0,
    tileContainer: 'fs',
    tileLayout: 'dz',
    tileFormat: 'last',
    tileDepth: 'last',
    tileAngle: 0,
    tileSkipBlanks: -1,
    tileBackground: [255, 255, 255, 255],
    tileCentre: false,
    tileId: 'https://example.com/iiif',
    tileBasename: '',
    timeoutSeconds: 0,
    linearA: [],
    linearB: [],
    // Function to notify of libvips warnings:
    // re-emitted as a 'warning' event and logged when NODE_DEBUG=sharp is set
    debuglog: warning => {
      this.emit('warning', warning);
      debuglog(warning);
    },
    // Function to notify of queue length changes
    queueListener: function (queueLength) {
      Sharp.queue.emit('change', queueLength);
    }
  };
  // Resolve the input (Buffer/TypedArray, file path, create/text/raw options,
  // or Stream when absent) into an input descriptor
  this.options.input = this._createInputDescriptor(input, options, { allowStream: true });
  return this;
};
// Inherit stream.Duplex on both Sharp instances and the constructor itself
Object.setPrototypeOf(Sharp.prototype, stream.Duplex.prototype);
Object.setPrototypeOf(Sharp, stream.Duplex);
|
355 |
|
356 | /**
|
357 | * Take a "snapshot" of the Sharp instance, returning a new instance.
|
358 | * Cloned instances inherit the input of their parent instance.
|
359 | * This allows multiple output Streams and therefore multiple processing pipelines to share a single input Stream.
|
360 | *
|
361 | * @example
|
362 | * const pipeline = sharp().rotate();
|
363 | * pipeline.clone().resize(800, 600).pipe(firstWritableStream);
|
364 | * pipeline.clone().extract({ left: 20, top: 20, width: 100, height: 100 }).pipe(secondWritableStream);
|
365 | * readableStream.pipe(pipeline);
|
366 | * // firstWritableStream receives auto-rotated, resized readableStream
|
367 | * // secondWritableStream receives auto-rotated, extracted region of readableStream
|
368 | *
|
369 | * @example
|
370 | * // Create a pipeline that will download an image, resize it and format it to different files
|
371 | * // Using Promises to know when the pipeline is complete
|
372 | * const fs = require("fs");
|
373 | * const got = require("got");
|
374 | * const sharpStream = sharp({ failOn: 'none' });
|
375 | *
|
376 | * const promises = [];
|
377 | *
|
378 | * promises.push(
|
379 | * sharpStream
|
380 | * .clone()
|
381 | * .jpeg({ quality: 100 })
|
382 | * .toFile("originalFile.jpg")
|
383 | * );
|
384 | *
|
385 | * promises.push(
|
386 | * sharpStream
|
387 | * .clone()
|
388 | * .resize({ width: 500 })
|
389 | * .jpeg({ quality: 80 })
|
390 | * .toFile("optimized-500.jpg")
|
391 | * );
|
392 | *
|
393 | * promises.push(
|
394 | * sharpStream
|
395 | * .clone()
|
396 | * .resize({ width: 500 })
|
397 | * .webp({ quality: 80 })
|
398 | * .toFile("optimized-500.webp")
|
399 | * );
|
400 | *
|
401 | * // https://github.com/sindresorhus/got/blob/main/documentation/3-streams.md
|
402 | * got.stream("https://www.example.com/some-file.jpg").pipe(sharpStream);
|
403 | *
|
404 | * Promise.all(promises)
|
405 | * .then(res => { console.log("Done!", res); })
|
406 | * .catch(err => {
|
407 | * console.error("Error processing files, let's clean it up", err);
|
408 | * try {
|
409 | * fs.unlinkSync("originalFile.jpg");
|
410 | * fs.unlinkSync("optimized-500.jpg");
|
411 | * fs.unlinkSync("optimized-500.webp");
|
412 | * } catch (e) {}
|
413 | * });
|
414 | *
|
415 | * @returns {Sharp}
|
416 | */
|
function clone () {
  // Start from a fresh instance, then snapshot this instance's options onto it.
  // Calling the constructor without `new` (and without a Sharp `this`) routes
  // through its internal `new Sharp()` fallback.
  const copy = this.constructor.call();
  copy.options = { ...this.options };
  // For Stream-based input, forward 'finish' so the clone inherits the
  // fully-buffered input data of its parent.
  if (this._isStreamInput()) {
    this.on('finish', () => {
      this._flattenBufferIn();
      copy.options.bufferIn = this.options.bufferIn;
      copy.emit('finish');
    });
  }
  return copy;
}
Object.assign(Sharp.prototype, { clone });
|
433 |
|
/**
 * Export the Sharp constructor as this module's sole entry point.
 * @private
 */
module.exports = Sharp;
|