// Copyright 2013 Lovell Fuller and others.
// SPDX-License-Identifier: Apache-2.0

'use strict';

const is = require('./is');

/**
 * Blend modes.
 * @member
 * @private
 */
const blend = {
  clear: 'clear',
  source: 'source',
  over: 'over',
  in: 'in',
  out: 'out',
  atop: 'atop',
  dest: 'dest',
  'dest-over': 'dest-over',
  'dest-in': 'dest-in',
  'dest-out': 'dest-out',
  'dest-atop': 'dest-atop',
  xor: 'xor',
  add: 'add',
  saturate: 'saturate',
  multiply: 'multiply',
  screen: 'screen',
  overlay: 'overlay',
  darken: 'darken',
  lighten: 'lighten',
  'colour-dodge': 'colour-dodge',
  'color-dodge': 'colour-dodge',
  'colour-burn': 'colour-burn',
  'color-burn': 'colour-burn',
  'hard-light': 'hard-light',
  'soft-light': 'soft-light',
  difference: 'difference',
  exclusion: 'exclusion'
};

/**
 * Composite image(s) over the processed (resized, extracted etc.) image.
 *
 * The images to composite must be the same size or smaller than the processed image.
 * If both `top` and `left` options are provided, they take precedence over `gravity`.
 *
 * Any resize, rotate or extract operations in the same processing pipeline
 * will always be applied to the input image before composition.
 *
 * The `blend` option can be one of `clear`, `source`, `over`, `in`, `out`, `atop`,
 * `dest`, `dest-over`, `dest-in`, `dest-out`, `dest-atop`,
 * `xor`, `add`, `saturate`, `multiply`, `screen`, `overlay`, `darken`, `lighten`,
 * `colour-dodge`, `color-dodge`, `colour-burn`, `color-burn`,
 * `hard-light`, `soft-light`, `difference`, `exclusion`.
 *
 * More information about blend modes can be found at
 * https://www.libvips.org/API/current/libvips-conversion.html#VipsBlendMode
 * and https://www.cairographics.org/operators/
 *
 * @since 0.22.0
 *
 * @example
 * await sharp(background)
 *   .composite([
 *     { input: layer1, gravity: 'northwest' },
 *     { input: layer2, gravity: 'southeast' },
 *   ])
 *   .toFile('combined.png');
 *
 * @example
 * const output = await sharp('input.gif', { animated: true })
 *   .composite([
 *     { input: 'overlay.png', tile: true, blend: 'saturate' }
 *   ])
 *   .toBuffer();
 *
 * @example
 * sharp('input.png')
 *   .rotate(180)
 *   .resize(300)
 *   .flatten( { background: '#ff6600' } )
 *   .composite([{ input: 'overlay.png', gravity: 'southeast' }])
 *   .sharpen()
 *   .withMetadata()
 *   .webp( { quality: 90 } )
 *   .toBuffer()
 *   .then(function(outputBuffer) {
 *     // outputBuffer contains upside down, 300px wide, alpha channel flattened
 *     // onto orange background, composited with overlay.png with SE gravity,
 *     // sharpened, with metadata, 90% quality WebP image data. Phew!
 *   });
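 *
 * @example
 * // A minimal sketch (the file names are placeholders): position an overlay
 * // at a fixed pixel offset. When both `top` and `left` are provided they
 * // take precedence over `gravity`.
 * await sharp('background.png')
 *   .composite([
 *     { input: 'overlay.png', top: 10, left: 20 }
 *   ])
 *   .toFile('composited.png');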
 *
 * @param {Object[]} images - Ordered list of images to composite
 * @param {Buffer|String} [images[].input] - Buffer containing image data, String containing the path to an image file, or Create object (see below)
 * @param {Object} [images[].input.create] - describes a blank overlay to be created.
 * @param {Number} [images[].input.create.width]
 * @param {Number} [images[].input.create.height]
 * @param {Number} [images[].input.create.channels] - 3-4
 * @param {String|Object} [images[].input.create.background] - parsed by the [color](https://www.npmjs.org/package/color) module to extract values for red, green, blue and alpha.
 * @param {Object} [images[].input.text] - describes a new text image to be created; see the text overlay example below.
 * @param {string} [images[].input.text.text] - text to render as a UTF-8 string. It can contain Pango markup, for example `<i>Le</i>Monde`.
 * @param {string} [images[].input.text.font] - font name to render with.
 * @param {string} [images[].input.text.fontfile] - absolute filesystem path to a font file that can be used by `font`.
 * @param {number} [images[].input.text.width=0] - integral number of pixels to word-wrap at. Lines of text wider than this will be broken at word boundaries.
 * @param {number} [images[].input.text.height=0] - integral number of pixels high. When defined, `dpi` will be ignored and the text will automatically fit the pixel resolution defined by `width` and `height`. Will be ignored if `width` is not specified or set to 0.
 * @param {string} [images[].input.text.align='left'] - text alignment (`'left'`, `'centre'`, `'center'`, `'right'`).
 * @param {boolean} [images[].input.text.justify=false] - set this to true to apply justification to the text.
 * @param {number} [images[].input.text.dpi=72] - the resolution (size) at which to render the text. Does not take effect if `height` is specified.
 * @param {boolean} [images[].input.text.rgba=false] - set this to true to enable RGBA output. This is useful for colour emoji rendering, or support for Pango markup features like `<span foreground="red">Red!</span>`.
 * @param {number} [images[].input.text.spacing=0] - text line height in points. Will use the font line height if none is specified.
 * @param {String} [images[].blend='over'] - how to blend this image with the image below.
 * @param {String} [images[].gravity='centre'] - gravity at which to place the overlay.
 * @param {Number} [images[].top] - the pixel offset from the top edge.
 * @param {Number} [images[].left] - the pixel offset from the left edge.
 * @param {Boolean} [images[].tile=false] - set to true to repeat the overlay image across the entire image with the given `gravity`.
 * @param {Boolean} [images[].premultiplied=false] - set to true to avoid premultiplying the image below. Equivalent to the `--premultiplied` vips option.
 * @param {Number} [images[].density=72] - number representing the DPI for vector overlay image.
 * @param {Object} [images[].raw] - describes overlay when using raw pixel data.
 * @param {Number} [images[].raw.width]
 * @param {Number} [images[].raw.height]
 * @param {Number} [images[].raw.channels]
 * @param {boolean} [images[].animated=false] - Set to `true` to read all frames/pages of an animated image.
 * @param {string} [images[].failOn='warning'] - @see {@link /api-constructor#parameters|constructor parameters}
 * @param {number|boolean} [images[].limitInputPixels=268402689] - @see {@link /api-constructor#parameters|constructor parameters}
 * @returns {Sharp}
 * @throws {Error} Invalid parameters
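 *
 * @example
 * // Sketch of a text overlay via the `text` input. The file names are
 * // placeholders and the named font is assumed to be available on the host
 * // system; `rgba: true` enables Pango markup colours such as the white
 * // foreground used here.
 * await sharp('background.png')
 *   .composite([{
 *     input: {
 *       text: {
 *         text: '<span foreground="white">Watermark</span>',
 *         font: 'sans',
 *         width: 400,
 *         rgba: true
 *       }
 *     },
 *     gravity: 'southeast'
 *   }])
 *   .toFile('watermarked.png');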
 */
function composite (images) {
  if (!Array.isArray(images)) {
    throw is.invalidParameterError('images to composite', 'array', images);
  }
  this.options.composite = images.map(image => {
    if (!is.object(image)) {
      throw is.invalidParameterError('image to composite', 'object', image);
    }
    const inputOptions = this._inputOptionsFromObject(image);
    const composite = {
      input: this._createInputDescriptor(image.input, inputOptions, { allowStream: false }),
      blend: 'over',
      tile: false,
      left: 0,
      top: 0,
      hasOffset: false,
      gravity: 0,
      premultiplied: false
    };
    if (is.defined(image.blend)) {
      if (is.string(blend[image.blend])) {
        composite.blend = blend[image.blend];
      } else {
        throw is.invalidParameterError('blend', 'valid blend name', image.blend);
      }
    }
    if (is.defined(image.tile)) {
      if (is.bool(image.tile)) {
        composite.tile = image.tile;
      } else {
        throw is.invalidParameterError('tile', 'boolean', image.tile);
      }
    }
    if (is.defined(image.left)) {
      if (is.integer(image.left)) {
        composite.left = image.left;
      } else {
        throw is.invalidParameterError('left', 'integer', image.left);
      }
    }
    if (is.defined(image.top)) {
      if (is.integer(image.top)) {
        composite.top = image.top;
      } else {
        throw is.invalidParameterError('top', 'integer', image.top);
      }
    }
    if (is.defined(image.top) !== is.defined(image.left)) {
      throw new Error('Expected both left and top to be set');
    } else {
      composite.hasOffset = is.integer(image.top) && is.integer(image.left);
    }
    if (is.defined(image.gravity)) {
      if (is.integer(image.gravity) && is.inRange(image.gravity, 0, 8)) {
        composite.gravity = image.gravity;
      } else if (is.string(image.gravity) && is.integer(this.constructor.gravity[image.gravity])) {
        composite.gravity = this.constructor.gravity[image.gravity];
      } else {
        throw is.invalidParameterError('gravity', 'valid gravity', image.gravity);
      }
    }
    if (is.defined(image.premultiplied)) {
      if (is.bool(image.premultiplied)) {
        composite.premultiplied = image.premultiplied;
      } else {
        throw is.invalidParameterError('premultiplied', 'boolean', image.premultiplied);
      }
    }
    return composite;
  });
  return this;
}

/**
 * Decorate the Sharp prototype with composite-related functions.
 * @private
 */
module.exports = function (Sharp) {
  Sharp.prototype.composite = composite;
  Sharp.blend = blend;
};