UNPKG

54.6 kBJavaScriptView Raw
1/*
2 MIT License http://www.opensource.org/licenses/mit-license.php
3 Author Tobias Koppers @sokra
4*/
5
6"use strict";
7
8const Chunk = require("../Chunk");
9const { STAGE_ADVANCED } = require("../OptimizationStages");
10const WebpackError = require("../WebpackError");
11const { requestToId } = require("../ids/IdHelpers");
12const { isSubset } = require("../util/SetHelpers");
13const SortableSet = require("../util/SortableSet");
14const {
15 compareModulesByIdentifier,
16 compareIterables
17} = require("../util/comparators");
18const createHash = require("../util/createHash");
19const deterministicGrouping = require("../util/deterministicGrouping");
20const { makePathsRelative } = require("../util/identifier");
21const memoize = require("../util/memoize");
22const MinMaxSizeWarning = require("./MinMaxSizeWarning");
23
24/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksCacheGroup} OptimizationSplitChunksCacheGroup */
25/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksGetCacheGroups} OptimizationSplitChunksGetCacheGroups */
26/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
27/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
28/** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
29/** @typedef {import("../ChunkGraph")} ChunkGraph */
30/** @typedef {import("../ChunkGroup")} ChunkGroup */
31/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
32/** @typedef {import("../Compilation").PathData} PathData */
33/** @typedef {import("../Compiler")} Compiler */
34/** @typedef {import("../Module")} Module */
35/** @typedef {import("../ModuleGraph")} ModuleGraph */
36/** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
37/** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
38
39/** @typedef {Record<string, number>} SplitChunksSizes */
40
41/**
42 * @callback ChunkFilterFunction
43 * @param {Chunk} chunk
44 * @returns {boolean}
45 */
46
47/**
48 * @callback CombineSizeFunction
49 * @param {number} a
50 * @param {number} b
51 * @returns {number}
52 */
53
54/**
55 * @typedef {Object} CacheGroupSource
56 * @property {string=} key
57 * @property {number=} priority
58 * @property {GetName=} getName
59 * @property {ChunkFilterFunction=} chunksFilter
60 * @property {boolean=} enforce
61 * @property {SplitChunksSizes} minSize
62 * @property {SplitChunksSizes} minSizeReduction
63 * @property {SplitChunksSizes} minRemainingSize
64 * @property {SplitChunksSizes} enforceSizeThreshold
65 * @property {SplitChunksSizes} maxAsyncSize
66 * @property {SplitChunksSizes} maxInitialSize
67 * @property {number=} minChunks
68 * @property {number=} maxAsyncRequests
69 * @property {number=} maxInitialRequests
70 * @property {(string | function(PathData, AssetInfo=): string)=} filename
71 * @property {string=} idHint
72 * @property {string} automaticNameDelimiter
73 * @property {boolean=} reuseExistingChunk
74 * @property {boolean=} usedExports
75 */
76
77/**
78 * @typedef {Object} CacheGroup
79 * @property {string} key
80 * @property {number=} priority
81 * @property {GetName=} getName
82 * @property {ChunkFilterFunction=} chunksFilter
83 * @property {SplitChunksSizes} minSize
84 * @property {SplitChunksSizes} minSizeReduction
85 * @property {SplitChunksSizes} minRemainingSize
86 * @property {SplitChunksSizes} enforceSizeThreshold
87 * @property {SplitChunksSizes} maxAsyncSize
88 * @property {SplitChunksSizes} maxInitialSize
89 * @property {number=} minChunks
90 * @property {number=} maxAsyncRequests
91 * @property {number=} maxInitialRequests
92 * @property {(string | function(PathData, AssetInfo=): string)=} filename
93 * @property {string=} idHint
94 * @property {string} automaticNameDelimiter
95 * @property {boolean} reuseExistingChunk
96 * @property {boolean} usedExports
97 * @property {boolean} _validateSize
98 * @property {boolean} _validateRemainingSize
99 * @property {SplitChunksSizes} _minSizeForMaxSize
100 * @property {boolean} _conditionalEnforce
101 */
102
103/**
104 * @typedef {Object} FallbackCacheGroup
105 * @property {ChunkFilterFunction} chunksFilter
106 * @property {SplitChunksSizes} minSize
107 * @property {SplitChunksSizes} maxAsyncSize
108 * @property {SplitChunksSizes} maxInitialSize
109 * @property {string} automaticNameDelimiter
110 */
111
112/**
113 * @typedef {Object} CacheGroupsContext
114 * @property {ModuleGraph} moduleGraph
115 * @property {ChunkGraph} chunkGraph
116 */
117
118/**
119 * @callback GetCacheGroups
120 * @param {Module} module
121 * @param {CacheGroupsContext} context
122 * @returns {CacheGroupSource[]}
123 */
124
125/**
126 * @callback GetName
127 * @param {Module=} module
128 * @param {Chunk[]=} chunks
129 * @param {string=} key
130 * @returns {string=}
131 */
132
133/**
134 * @typedef {Object} SplitChunksOptions
135 * @property {ChunkFilterFunction} chunksFilter
136 * @property {string[]} defaultSizeTypes
137 * @property {SplitChunksSizes} minSize
138 * @property {SplitChunksSizes} minSizeReduction
139 * @property {SplitChunksSizes} minRemainingSize
140 * @property {SplitChunksSizes} enforceSizeThreshold
141 * @property {SplitChunksSizes} maxInitialSize
142 * @property {SplitChunksSizes} maxAsyncSize
143 * @property {number} minChunks
144 * @property {number} maxAsyncRequests
145 * @property {number} maxInitialRequests
146 * @property {boolean} hidePathInfo
147 * @property {string | function(PathData, AssetInfo=): string} filename
148 * @property {string} automaticNameDelimiter
149 * @property {GetCacheGroups} getCacheGroups
150 * @property {GetName} getName
151 * @property {boolean} usedExports
152 * @property {FallbackCacheGroup} fallbackCacheGroup
153 */
154
155/**
156 * @typedef {Object} ChunksInfoItem
157 * @property {SortableSet<Module>} modules
158 * @property {CacheGroup} cacheGroup
159 * @property {number} cacheGroupIndex
160 * @property {string} name
161 * @property {Record<string, number>} sizes
162 * @property {Set<Chunk>} chunks
163 * @property {Set<Chunk>} reuseableChunks
164 * @property {Set<bigint | Chunk>} chunksKeys
165 */
166
// Name getter used when no `name` option is provided: never assigns a name
// (returns undefined for every module/chunks/key combination).
const defaultGetName = /** @type {GetName} */ (() => {});

// deterministicGrouping specialized to group Module items
const deterministicGroupingForModules =
	/** @type {function(DeterministicGroupingOptionsForModule): DeterministicGroupingGroupedItemsForModule[]} */ (
		deterministicGrouping
	);

/** @type {WeakMap<Module, string>} */
const getKeyCache = new WeakMap();
176
/**
 * Hashes a filename with the configured hash function and returns a short
 * 8-character prefix of the digest (used to hide path info in chunk names).
 * @param {string} name a filename to hash
 * @param {OutputOptions} outputOptions provides hashFunction and hashDigest
 * @returns {string} hashed filename
 */
const hashFilename = (name, outputOptions) => {
	const hash = createHash(outputOptions.hashFunction);
	hash.update(name);
	const digest = /** @type {string} */ (hash.digest(outputOptions.hashDigest));
	return digest.slice(0, 8);
};
190
/**
 * Determines the maximum number of chunks in any chunk group the given chunk
 * belongs to — i.e. the worst-case request count for loading it.
 * @param {Chunk} chunk the chunk
 * @returns {number} the number of requests
 */
const getRequests = chunk => {
	let maxRequests = 0;
	for (const group of chunk.groupsIterable) {
		const count = group.chunks.length;
		if (count > maxRequests) maxRequests = count;
	}
	return maxRequests;
};
202
/**
 * Maps every value of an object to a new value, keeping the keys.
 * @param {Object} obj source object (only own enumerable keys are visited)
 * @param {function(any, string): any} fn mapper receiving (value, key)
 * @returns {Object} a new null-prototype object with the mapped values
 */
const mapObject = (obj, fn) => {
	const result = Object.create(null);
	for (const [key, value] of Object.entries(obj)) {
		result[key] = fn(value, key);
	}
	return result;
};
210
/**
 * @template T
 * @param {Set<T>} a set
 * @param {Set<T>} b other set
 * @returns {boolean} true if at least one item of a is in b
 */
const isOverlap = (a, b) => {
	// any shared element means the sets overlap
	for (const element of a) {
		if (b.has(element)) return true;
	}
	return false;
};
223
224const compareModuleIterables = compareIterables(compareModulesByIdentifier);
225
/**
 * Total order over pending chunks-info items, used to decide which cache
 * group entry to prefer. Larger results mean `a` ranks higher. Criteria are
 * applied in sequence; each only decides when all previous ones tie.
 * NOTE(review): the caller is outside this chunk — confirm whether the
 * "best" entry is the maximum or minimum under this order.
 * @param {ChunksInfoItem} a item
 * @param {ChunksInfoItem} b item
 * @returns {number} compare result
 */
const compareEntries = (a, b) => {
	// 1. by priority
	const diffPriority = a.cacheGroup.priority - b.cacheGroup.priority;
	if (diffPriority) return diffPriority;
	// 2. by number of chunks
	const diffCount = a.chunks.size - b.chunks.size;
	if (diffCount) return diffCount;
	// 3. by size reduction (total module size saved by deduplicating the
	// modules out of all but one chunk)
	const aSizeReduce = totalSize(a.sizes) * (a.chunks.size - 1);
	const bSizeReduce = totalSize(b.sizes) * (b.chunks.size - 1);
	const diffSizeReduce = aSizeReduce - bSizeReduce;
	if (diffSizeReduce) return diffSizeReduce;
	// 4. by cache group index — note this one is inverted (b - a), so a
	// LOWER index ranks higher, unlike the previous criteria
	const indexDiff = b.cacheGroupIndex - a.cacheGroupIndex;
	if (indexDiff) return indexDiff;
	// 5. by number of modules (to be able to compare by identifier)
	const modulesA = a.modules;
	const modulesB = b.modules;
	const diff = modulesA.size - modulesB.size;
	if (diff) return diff;
	// 6. by module identifiers (sort both SortableSets first so the
	// element-wise comparison is deterministic)
	modulesA.sort();
	modulesB.sort();
	return compareModuleIterables(modulesA, modulesB);
};
256
// Predicates for the `chunks` option: which chunks a cache group may act on
const INITIAL_CHUNK_FILTER = chunk => chunk.canBeInitial();
const ASYNC_CHUNK_FILTER = chunk => !chunk.canBeInitial();
const ALL_CHUNK_FILTER = chunk => true;
260
/**
 * Normalizes a size option into a per-size-type record. A plain number is
 * expanded to all default size types; an object is shallow-copied; anything
 * else yields an empty record.
 * @param {OptimizationSplitChunksSizes} value the sizes
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {SplitChunksSizes} normalized representation
 */
const normalizeSizes = (value, defaultSizeTypes) => {
	if (typeof value === "number") {
		/** @type {Record<string, number>} */
		const sizes = {};
		for (const sizeType of defaultSizeTypes) {
			sizes[sizeType] = value;
		}
		return sizes;
	}
	if (typeof value === "object" && value !== null) {
		return { ...value };
	}
	return {};
};
278
/**
 * Merges size records. When several arguments define the same size type, the
 * value from the EARLIER argument wins. `undefined` arguments are skipped.
 * @param {...SplitChunksSizes} sizes the sizes
 * @returns {SplitChunksSizes} the merged sizes
 */
const mergeSizes = (...sizes) => {
	/** @type {SplitChunksSizes} */
	const merged = {};
	// apply from last to first so earlier arguments overwrite later ones
	for (const part of sizes.reverse()) {
		Object.assign(merged, part);
	}
	return merged;
};
291
/**
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {boolean} true, if there are sizes > 0
 */
const hasNonZeroSizes = sizes => Object.values(sizes).some(size => size > 0);
302
/**
 * Combines two size records. Size types present in both records are merged
 * with `combine`; types present in only one record are copied unchanged.
 * @param {SplitChunksSizes} a first sizes
 * @param {SplitChunksSizes} b second sizes
 * @param {CombineSizeFunction} combine a function to combine sizes
 * @returns {SplitChunksSizes} the combined sizes
 */
const combineSizes = (a, b, combine) => {
	const keysOfA = new Set(Object.keys(a));
	const keysOfB = new Set(Object.keys(b));
	/** @type {SplitChunksSizes} */
	const combined = {};
	// iterate a's keys first, then b-only keys (preserves key order)
	for (const key of new Set([...keysOfA, ...keysOfB])) {
		const inA = keysOfA.has(key);
		const inB = keysOfB.has(key);
		combined[key] = inA && inB ? combine(a[key], b[key]) : inA ? a[key] : b[key];
	}
	return combined;
};
328
/**
 * Checks the sizes against a minimum. Size types that are absent or zero in
 * `sizes` are not checked; only present, non-zero sizes must reach the
 * corresponding minimum. An empty `minSize` always passes.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSize the min sizes
 * @returns {boolean} true if all checked sizes are at least `minSize`
 */
const checkMinSize = (sizes, minSize) =>
	Object.keys(minSize).every(key => {
		const size = sizes[key];
		// absent/zero sizes are exempt from the minimum
		if (size === undefined || size === 0) return true;
		return size >= minSize[key];
	});
342
/**
 * Checks whether removing the modules from `chunkCount` chunks would save at
 * least `minSizeReduction` per size type. Absent or zero sizes are exempt.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSizeReduction the minimum size reduction
 * @param {number} chunkCount number of chunks the modules occur in
 * @returns {boolean} true if all checked reductions reach the minimum
 */
const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) =>
	Object.keys(minSizeReduction).every(key => {
		const size = sizes[key];
		if (size === undefined || size === 0) return true;
		// the saved size is size * chunkCount (deduplicated from each chunk)
		return size * chunkCount >= minSizeReduction[key];
	});
357
/**
 * Collects the size types whose (present, non-zero) size is below the
 * minimum.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSize the min sizes
 * @returns {undefined | string[]} list of violating size types, or undefined
 * when nothing violates
 */
const getViolatingMinSizes = (sizes, minSize) => {
	const violating = Object.keys(minSize).filter(key => {
		const size = sizes[key];
		return size !== undefined && size !== 0 && size < minSize[key];
	});
	return violating.length > 0 ? violating : undefined;
};
375
/**
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {number} the total size, summed over all size types
 */
const totalSize = sizes =>
	Object.values(sizes).reduce((sum, size) => sum + size, 0);
387
/**
 * Normalizes the `name` option into a GetName function. A string becomes a
 * constant getter; a function is used as-is; `false`/undefined yields no
 * getter.
 * @param {false|string|Function} name the chunk name option
 * @returns {GetName} a function to get the name of the chunk, or undefined
 */
const normalizeName = name => {
	if (typeof name === "function") {
		return /** @type {GetName} */ (name);
	}
	if (typeof name === "string") {
		return () => name;
	}
	return undefined;
};
400
/**
 * Normalizes the `chunks` option into a chunk filter predicate. The three
 * string shorthands map to the shared predicate constants; a function is
 * used directly; any other value yields no filter.
 * @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option
 * @returns {ChunkFilterFunction} the chunk filter function, or undefined
 */
const normalizeChunksFilter = chunks => {
	switch (chunks) {
		case "initial":
			return INITIAL_CHUNK_FILTER;
		case "async":
			return ASYNC_CHUNK_FILTER;
		case "all":
			return ALL_CHUNK_FILTER;
		default:
			return typeof chunks === "function" ? chunks : undefined;
	}
};
419
/**
 * Normalizes the `cacheGroups` option into a single function that, given a
 * module and context, returns the list of matching cache group sources.
 * @param {GetCacheGroups | Record<string, false|string|RegExp|OptimizationSplitChunksGetCacheGroups|OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {GetCacheGroups} a function to get the cache groups
 */
const normalizeCacheGroups = (cacheGroups, defaultSizeTypes) => {
	if (typeof cacheGroups === "function") {
		// user supplied the matcher directly
		return cacheGroups;
	}
	if (typeof cacheGroups === "object" && cacheGroups !== null) {
		// build one matcher ("handler") per cache group entry; each handler
		// appends its CacheGroupSource to `results` when the module matches
		/** @type {(function(Module, CacheGroupsContext, CacheGroupSource[]): void)[]} */
		const handlers = [];
		for (const key of Object.keys(cacheGroups)) {
			const option = cacheGroups[key];
			if (option === false) {
				// `false` disables the cache group entirely
				continue;
			}
			if (typeof option === "string" || option instanceof RegExp) {
				// shorthand: the option itself is a test against the module
				const source = createCacheGroupSource({}, key, defaultSizeTypes);
				handlers.push((module, context, results) => {
					if (checkTest(option, module, context)) {
						results.push(source);
					}
				});
			} else if (typeof option === "function") {
				// the function may return one group object or an array of them;
				// sources are cached per returned group object (WeakMap) so the
				// same object always maps to the same CacheGroupSource
				const cache = new WeakMap();
				handlers.push((module, context, results) => {
					const result = option(module);
					if (result) {
						const groups = Array.isArray(result) ? result : [result];
						for (const group of groups) {
							const cachedSource = cache.get(group);
							if (cachedSource !== undefined) {
								results.push(cachedSource);
							} else {
								const source = createCacheGroupSource(
									group,
									key,
									defaultSizeTypes
								);
								cache.set(group, source);
								results.push(source);
							}
						}
					}
				});
			} else {
				// plain options object: module must match test, type and layer
				const source = createCacheGroupSource(option, key, defaultSizeTypes);
				handlers.push((module, context, results) => {
					if (
						checkTest(option.test, module, context) &&
						checkModuleType(option.type, module) &&
						checkModuleLayer(option.layer, module)
					) {
						results.push(source);
					}
				});
			}
		}
		/**
		 * @param {Module} module the current module
		 * @param {CacheGroupsContext} context the current context
		 * @returns {CacheGroupSource[]} the matching cache groups
		 */
		const fn = (module, context) => {
			/** @type {CacheGroupSource[]} */
			let results = [];
			for (const fn of handlers) {
				fn(module, context, results);
			}
			return results;
		};
		return fn;
	}
	// any other value: no cache groups at all
	return () => null;
};
496
/**
 * Checks a module against a cache group `test` option. A function is called
 * with (module, context); a boolean is returned as-is; a string matches as a
 * prefix of the module's condition name; a RegExp is tested against it.
 * @param {undefined|boolean|string|RegExp|Function} test test option
 * @param {Module} module the module
 * @param {CacheGroupsContext} context context object
 * @returns {boolean} true, if the module should be selected
 */
const checkTest = (test, module, context) => {
	// no test means every module matches
	if (test === undefined) return true;
	switch (typeof test) {
		case "function":
			return test(module, context);
		case "boolean":
			return test;
		case "string": {
			const name = module.nameForCondition();
			return name && name.startsWith(test);
		}
		default:
			break;
	}
	if (test instanceof RegExp) {
		const name = module.nameForCondition();
		return name && test.test(name);
	}
	return false;
};
519
/**
 * Checks a module against a cache group `type` option. A function is called
 * with the module type; a string must equal the type exactly; a RegExp is
 * tested against it.
 * @param {undefined|string|RegExp|Function} test type option
 * @param {Module} module the module
 * @returns {boolean} true, if the module should be selected
 */
const checkModuleType = (test, module) => {
	// no type filter means every module matches
	if (test === undefined) return true;
	if (typeof test === "function") {
		return test(module.type);
	}
	if (typeof test === "string") {
		return test === module.type;
	}
	if (test instanceof RegExp) {
		return test.test(module.type);
	}
	return false;
};
540
/**
 * Checks a module against a cache group `layer` option. A function is called
 * with the module layer; the empty string matches only modules WITHOUT a
 * layer; any other string matches as a layer prefix; a RegExp is tested
 * against the layer.
 * @param {undefined|string|RegExp|Function} test layer option
 * @param {Module} module the module
 * @returns {boolean} true, if the module should be selected
 */
const checkModuleLayer = (test, module) => {
	// no layer filter means every module matches
	if (test === undefined) return true;
	if (typeof test === "function") {
		return test(module.layer);
	}
	if (typeof test === "string") {
		const layer = module.layer;
		// empty string selects only modules that have no layer at all
		if (test === "") return !layer;
		return layer && layer.startsWith(test);
	}
	if (test instanceof RegExp) {
		return test.test(module.layer);
	}
	return false;
};
561
/**
 * Creates a normalized CacheGroupSource from a user-provided cache group
 * options object. Size options are expanded to per-size-type records.
 * @param {OptimizationSplitChunksCacheGroup} options the group options
 * @param {string} key key of cache group
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {CacheGroupSource} the normalized cached group
 */
const createCacheGroupSource = (options, key, defaultSizeTypes) => {
	const minSize = normalizeSizes(options.minSize, defaultSizeTypes);
	const minSizeReduction = normalizeSizes(
		options.minSizeReduction,
		defaultSizeTypes
	);
	// maxSize acts as the default for both maxAsyncSize and maxInitialSize
	const maxSize = normalizeSizes(options.maxSize, defaultSizeTypes);
	return {
		key,
		priority: options.priority,
		getName: normalizeName(options.name),
		chunksFilter: normalizeChunksFilter(options.chunks),
		enforce: options.enforce,
		minSize,
		minSizeReduction,
		// minRemainingSize falls back to minSize for unspecified size types
		minRemainingSize: mergeSizes(
			normalizeSizes(options.minRemainingSize, defaultSizeTypes),
			minSize
		),
		enforceSizeThreshold: normalizeSizes(
			options.enforceSizeThreshold,
			defaultSizeTypes
		),
		maxAsyncSize: mergeSizes(
			normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
			maxSize
		),
		maxInitialSize: mergeSizes(
			normalizeSizes(options.maxInitialSize, defaultSizeTypes),
			maxSize
		),
		minChunks: options.minChunks,
		maxAsyncRequests: options.maxAsyncRequests,
		maxInitialRequests: options.maxInitialRequests,
		filename: options.filename,
		idHint: options.idHint,
		automaticNameDelimiter: options.automaticNameDelimiter,
		reuseExistingChunk: options.reuseExistingChunk,
		usedExports: options.usedExports
	};
};
609
610module.exports = class SplitChunksPlugin {
	/**
	 * Normalizes all plugin options into `this.options`, expanding size
	 * options to per-size-type records and filling in defaults.
	 * @param {OptimizationSplitChunksOptions=} options plugin options
	 */
	constructor(options = {}) {
		const defaultSizeTypes = options.defaultSizeTypes || [
			"javascript",
			"unknown"
		];
		const fallbackCacheGroup = options.fallbackCacheGroup || {};
		const minSize = normalizeSizes(options.minSize, defaultSizeTypes);
		const minSizeReduction = normalizeSizes(
			options.minSizeReduction,
			defaultSizeTypes
		);
		// maxSize is the shared default for maxAsyncSize and maxInitialSize
		const maxSize = normalizeSizes(options.maxSize, defaultSizeTypes);

		/** @type {SplitChunksOptions} */
		this.options = {
			chunksFilter: normalizeChunksFilter(options.chunks || "all"),
			defaultSizeTypes,
			minSize,
			minSizeReduction,
			// minRemainingSize falls back to minSize for unspecified size types
			minRemainingSize: mergeSizes(
				normalizeSizes(options.minRemainingSize, defaultSizeTypes),
				minSize
			),
			enforceSizeThreshold: normalizeSizes(
				options.enforceSizeThreshold,
				defaultSizeTypes
			),
			maxAsyncSize: mergeSizes(
				normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
				maxSize
			),
			maxInitialSize: mergeSizes(
				normalizeSizes(options.maxInitialSize, defaultSizeTypes),
				maxSize
			),
			minChunks: options.minChunks || 1,
			maxAsyncRequests: options.maxAsyncRequests || 1,
			maxInitialRequests: options.maxInitialRequests || 1,
			hidePathInfo: options.hidePathInfo || false,
			filename: options.filename || undefined,
			getCacheGroups: normalizeCacheGroups(
				options.cacheGroups,
				defaultSizeTypes
			),
			getName: options.name ? normalizeName(options.name) : defaultGetName,
			automaticNameDelimiter: options.automaticNameDelimiter,
			usedExports: options.usedExports,
			// used when a chunk exceeds maxSize but no cache group applies
			fallbackCacheGroup: {
				chunksFilter: normalizeChunksFilter(
					fallbackCacheGroup.chunks || options.chunks || "all"
				),
				minSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.minSize, defaultSizeTypes),
					minSize
				),
				// precedence: fallback maxAsyncSize > fallback maxSize >
				// top-level maxAsyncSize > top-level maxSize
				maxAsyncSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.maxAsyncSize, defaultSizeTypes),
					normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes),
					normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
					normalizeSizes(options.maxSize, defaultSizeTypes)
				),
				// same precedence scheme for initial chunks
				maxInitialSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.maxInitialSize, defaultSizeTypes),
					normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes),
					normalizeSizes(options.maxInitialSize, defaultSizeTypes),
					normalizeSizes(options.maxSize, defaultSizeTypes)
				),
				automaticNameDelimiter:
					fallbackCacheGroup.automaticNameDelimiter ||
					options.automaticNameDelimiter ||
					"~"
			}
		};

		/** @type {WeakMap<CacheGroupSource, CacheGroup>} */
		this._cacheGroupCache = new WeakMap();
	}
691
	/**
	 * Resolves a CacheGroupSource into a full CacheGroup by filling in the
	 * global defaults from `this.options`. When the source has `enforce`
	 * set, the global size/count constraints are skipped (merged with
	 * `undefined` instead of the defaults, and request limits become
	 * Infinity / minChunks becomes 1). Results are cached per source object.
	 * @param {CacheGroupSource} cacheGroupSource source
	 * @returns {CacheGroup} the cache group (cached)
	 */
	_getCacheGroup(cacheGroupSource) {
		const cacheEntry = this._cacheGroupCache.get(cacheGroupSource);
		if (cacheEntry !== undefined) return cacheEntry;
		const minSize = mergeSizes(
			cacheGroupSource.minSize,
			cacheGroupSource.enforce ? undefined : this.options.minSize
		);
		const minSizeReduction = mergeSizes(
			cacheGroupSource.minSizeReduction,
			cacheGroupSource.enforce ? undefined : this.options.minSizeReduction
		);
		const minRemainingSize = mergeSizes(
			cacheGroupSource.minRemainingSize,
			cacheGroupSource.enforce ? undefined : this.options.minRemainingSize
		);
		const enforceSizeThreshold = mergeSizes(
			cacheGroupSource.enforceSizeThreshold,
			cacheGroupSource.enforce ? undefined : this.options.enforceSizeThreshold
		);
		const cacheGroup = {
			key: cacheGroupSource.key,
			priority: cacheGroupSource.priority || 0,
			chunksFilter: cacheGroupSource.chunksFilter || this.options.chunksFilter,
			minSize,
			minSizeReduction,
			minRemainingSize,
			enforceSizeThreshold,
			maxAsyncSize: mergeSizes(
				cacheGroupSource.maxAsyncSize,
				cacheGroupSource.enforce ? undefined : this.options.maxAsyncSize
			),
			maxInitialSize: mergeSizes(
				cacheGroupSource.maxInitialSize,
				cacheGroupSource.enforce ? undefined : this.options.maxInitialSize
			),
			minChunks:
				cacheGroupSource.minChunks !== undefined
					? cacheGroupSource.minChunks
					: cacheGroupSource.enforce
					? 1
					: this.options.minChunks,
			maxAsyncRequests:
				cacheGroupSource.maxAsyncRequests !== undefined
					? cacheGroupSource.maxAsyncRequests
					: cacheGroupSource.enforce
					? Infinity
					: this.options.maxAsyncRequests,
			maxInitialRequests:
				cacheGroupSource.maxInitialRequests !== undefined
					? cacheGroupSource.maxInitialRequests
					: cacheGroupSource.enforce
					? Infinity
					: this.options.maxInitialRequests,
			getName:
				cacheGroupSource.getName !== undefined
					? cacheGroupSource.getName
					: this.options.getName,
			usedExports:
				cacheGroupSource.usedExports !== undefined
					? cacheGroupSource.usedExports
					: this.options.usedExports,
			filename:
				cacheGroupSource.filename !== undefined
					? cacheGroupSource.filename
					: this.options.filename,
			automaticNameDelimiter:
				cacheGroupSource.automaticNameDelimiter !== undefined
					? cacheGroupSource.automaticNameDelimiter
					: this.options.automaticNameDelimiter,
			// without an explicit idHint, the cache group key is used
			idHint:
				cacheGroupSource.idHint !== undefined
					? cacheGroupSource.idHint
					: cacheGroupSource.key,
			reuseExistingChunk: cacheGroupSource.reuseExistingChunk || false,
			// precomputed flags: whether size validations need to run at all
			_validateSize: hasNonZeroSizes(minSize),
			_validateRemainingSize: hasNonZeroSizes(minRemainingSize),
			// note: intentionally NOT the enforce-aware `minSize` from above
			_minSizeForMaxSize: mergeSizes(
				cacheGroupSource.minSize,
				this.options.minSize
			),
			_conditionalEnforce: hasNonZeroSizes(enforceSizeThreshold)
		};
		this._cacheGroupCache.set(cacheGroupSource, cacheGroup);
		return cacheGroup;
	}
781
782 /**
783 * Apply the plugin
784 * @param {Compiler} compiler the compiler instance
785 * @returns {void}
786 */
787 apply(compiler) {
788 const cachedMakePathsRelative = makePathsRelative.bindContextCache(
789 compiler.context,
790 compiler.root
791 );
792 compiler.hooks.thisCompilation.tap("SplitChunksPlugin", compilation => {
793 const logger = compilation.getLogger("webpack.SplitChunksPlugin");
794 let alreadyOptimized = false;
795 compilation.hooks.unseal.tap("SplitChunksPlugin", () => {
796 alreadyOptimized = false;
797 });
798 compilation.hooks.optimizeChunks.tap(
799 {
800 name: "SplitChunksPlugin",
801 stage: STAGE_ADVANCED
802 },
803 chunks => {
804 if (alreadyOptimized) return;
805 alreadyOptimized = true;
806 logger.time("prepare");
807 const chunkGraph = compilation.chunkGraph;
808 const moduleGraph = compilation.moduleGraph;
809 // Give each selected chunk an index (to create strings from chunks)
810 /** @type {Map<Chunk, bigint>} */
811 const chunkIndexMap = new Map();
812 const ZERO = BigInt("0");
813 const ONE = BigInt("1");
814 const START = ONE << BigInt("31");
815 let index = START;
816 for (const chunk of chunks) {
817 chunkIndexMap.set(
818 chunk,
819 index | BigInt((Math.random() * 0x7fffffff) | 0)
820 );
821 index = index << ONE;
822 }
823 /**
824 * @param {Iterable<Chunk>} chunks list of chunks
825 * @returns {bigint | Chunk} key of the chunks
826 */
827 const getKey = chunks => {
828 const iterator = chunks[Symbol.iterator]();
829 let result = iterator.next();
830 if (result.done) return ZERO;
831 const first = result.value;
832 result = iterator.next();
833 if (result.done) return first;
834 let key =
835 chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
836 while (!(result = iterator.next()).done) {
837 const raw = chunkIndexMap.get(result.value);
838 key = key ^ raw;
839 }
840 return key;
841 };
842 const keyToString = key => {
843 if (typeof key === "bigint") return key.toString(16);
844 return chunkIndexMap.get(key).toString(16);
845 };
846
847 const getChunkSetsInGraph = memoize(() => {
848 /** @type {Map<bigint, Set<Chunk>>} */
849 const chunkSetsInGraph = new Map();
850 /** @type {Set<Chunk>} */
851 const singleChunkSets = new Set();
852 for (const module of compilation.modules) {
853 const chunks = chunkGraph.getModuleChunksIterable(module);
854 const chunksKey = getKey(chunks);
855 if (typeof chunksKey === "bigint") {
856 if (!chunkSetsInGraph.has(chunksKey)) {
857 chunkSetsInGraph.set(chunksKey, new Set(chunks));
858 }
859 } else {
860 singleChunkSets.add(chunksKey);
861 }
862 }
863 return { chunkSetsInGraph, singleChunkSets };
864 });
865
866 /**
867 * @param {Module} module the module
868 * @returns {Iterable<Chunk[]>} groups of chunks with equal exports
869 */
870 const groupChunksByExports = module => {
871 const exportsInfo = moduleGraph.getExportsInfo(module);
872 const groupedByUsedExports = new Map();
873 for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
874 const key = exportsInfo.getUsageKey(chunk.runtime);
875 const list = groupedByUsedExports.get(key);
876 if (list !== undefined) {
877 list.push(chunk);
878 } else {
879 groupedByUsedExports.set(key, [chunk]);
880 }
881 }
882 return groupedByUsedExports.values();
883 };
884
885 /** @type {Map<Module, Iterable<Chunk[]>>} */
886 const groupedByExportsMap = new Map();
887
888 const getExportsChunkSetsInGraph = memoize(() => {
889 /** @type {Map<bigint, Set<Chunk>>} */
890 const chunkSetsInGraph = new Map();
891 /** @type {Set<Chunk>} */
892 const singleChunkSets = new Set();
893 for (const module of compilation.modules) {
894 const groupedChunks = Array.from(groupChunksByExports(module));
895 groupedByExportsMap.set(module, groupedChunks);
896 for (const chunks of groupedChunks) {
897 if (chunks.length === 1) {
898 singleChunkSets.add(chunks[0]);
899 } else {
900 const chunksKey = /** @type {bigint} */ (getKey(chunks));
901 if (!chunkSetsInGraph.has(chunksKey)) {
902 chunkSetsInGraph.set(chunksKey, new Set(chunks));
903 }
904 }
905 }
906 }
907 return { chunkSetsInGraph, singleChunkSets };
908 });
909
910 // group these set of chunks by count
911 // to allow to check less sets via isSubset
912 // (only smaller sets can be subset)
913 const groupChunkSetsByCount = chunkSets => {
914 /** @type {Map<number, Array<Set<Chunk>>>} */
915 const chunkSetsByCount = new Map();
916 for (const chunksSet of chunkSets) {
917 const count = chunksSet.size;
918 let array = chunkSetsByCount.get(count);
919 if (array === undefined) {
920 array = [];
921 chunkSetsByCount.set(count, array);
922 }
923 array.push(chunksSet);
924 }
925 return chunkSetsByCount;
926 };
927 const getChunkSetsByCount = memoize(() =>
928 groupChunkSetsByCount(
929 getChunkSetsInGraph().chunkSetsInGraph.values()
930 )
931 );
932 const getExportsChunkSetsByCount = memoize(() =>
933 groupChunkSetsByCount(
934 getExportsChunkSetsInGraph().chunkSetsInGraph.values()
935 )
936 );
937
938 // Create a list of possible combinations
939 const createGetCombinations = (
940 chunkSets,
941 singleChunkSets,
942 chunkSetsByCount
943 ) => {
944 /** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
945 const combinationsCache = new Map();
946
947 return key => {
948 const cacheEntry = combinationsCache.get(key);
949 if (cacheEntry !== undefined) return cacheEntry;
950 if (key instanceof Chunk) {
951 const result = [key];
952 combinationsCache.set(key, result);
953 return result;
954 }
955 const chunksSet = chunkSets.get(key);
956 /** @type {(Set<Chunk> | Chunk)[]} */
957 const array = [chunksSet];
958 for (const [count, setArray] of chunkSetsByCount) {
							// "equal" is not needed because they would have been merged in the first step
960 if (count < chunksSet.size) {
961 for (const set of setArray) {
962 if (isSubset(chunksSet, set)) {
963 array.push(set);
964 }
965 }
966 }
967 }
968 for (const chunk of singleChunkSets) {
969 if (chunksSet.has(chunk)) {
970 array.push(chunk);
971 }
972 }
973 combinationsCache.set(key, array);
974 return array;
975 };
976 };
977
// Factory for the combination lookup over the plain (non-usedExports) chunk sets.
const getCombinationsFactory = memoize(() => {
	const graphInfo = getChunkSetsInGraph();
	return createGetCombinations(
		graphInfo.chunkSetsInGraph,
		graphInfo.singleChunkSets,
		getChunkSetsByCount()
	);
});
const getCombinations = key => getCombinationsFactory()(key);

// Factory for the combination lookup over the exports-based chunk sets.
const getExportsCombinationsFactory = memoize(() => {
	const graphInfo = getExportsChunkSetsInGraph();
	return createGetCombinations(
		graphInfo.chunkSetsInGraph,
		graphInfo.singleChunkSets,
		getExportsChunkSetsByCount()
	);
});
const getExportsCombinations = key => getExportsCombinationsFactory()(key);
999
/**
 * @typedef {Object} SelectedChunksResult
 * @property {Chunk[]} chunks the list of chunks
 * @property {bigint | Chunk} key a key of the list
 */

/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
const selectedChunksCacheByChunksSet = new WeakMap();

/**
 * get list and key by applying the filter function to the list
 * It is cached for performance reasons
 * @param {Set<Chunk> | Chunk} chunks list of chunks
 * @param {ChunkFilterFunction} chunkFilter filter function for chunks
 * @returns {SelectedChunksResult} list and key
 */
const getSelectedChunks = (chunks, chunkFilter) => {
	let byFilter = selectedChunksCacheByChunksSet.get(chunks);
	if (byFilter === undefined) {
		selectedChunksCacheByChunksSet.set(chunks, (byFilter = new WeakMap()));
	}
	/** @type {SelectedChunksResult} */
	let result = byFilter.get(chunkFilter);
	if (result === undefined) {
		/** @type {Chunk[]} */
		const selectedChunks = [];
		// A bare Chunk is treated like a one-element list
		if (chunks instanceof Chunk) {
			if (chunkFilter(chunks)) selectedChunks.push(chunks);
		} else {
			for (const chunk of chunks) {
				if (chunkFilter(chunk)) selectedChunks.push(chunk);
			}
		}
		result = {
			chunks: selectedChunks,
			key: getKey(selectedChunks)
		};
		byFilter.set(chunkFilter, result);
	}
	return result;
};
1042
// Cache: "name|chunksKey" -> whether the named existing chunk is a valid
// parent of that chunk selection (see addModuleToChunksInfoMap below)
/** @type {Map<string, boolean>} */
const alreadyValidatedParents = new Map();
// Names for which a conflict error was already emitted (avoid duplicates)
/** @type {Set<string>} */
const alreadyReportedErrors = new Set();

// Map a list of chunks to a list of modules
// For the key the chunk "index" is used, the value is a SortableSet of modules
/** @type {Map<string, ChunksInfoItem>} */
const chunksInfoMap = new Map();
1052
/**
 * Registers a module as a candidate for a split chunk described by the
 * given cache group and chunk selection, accumulating modules, sizes and
 * chunks in `chunksInfoMap`.
 * @param {CacheGroup} cacheGroup the current cache group
 * @param {number} cacheGroupIndex the index of the cache group in the ordering
 * @param {Chunk[]} selectedChunks chunks selected for this module
 * @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
 * @param {Module} module the current module
 * @returns {void}
 */
const addModuleToChunksInfoMap = (
	cacheGroup,
	cacheGroupIndex,
	selectedChunks,
	selectedChunksKey,
	module
) => {
	// Break if minimum number of chunks is not reached
	if (selectedChunks.length < cacheGroup.minChunks) return;
	// Determine name for split chunk
	const name = cacheGroup.getName(
		module,
		selectedChunks,
		cacheGroup.key
	);
	// Check if the name is ok
	const existingChunk = compilation.namedChunks.get(name);
	if (existingChunk) {
		const parentValidationKey = `${name}|${
			typeof selectedChunksKey === "bigint"
				? selectedChunksKey
				: selectedChunksKey.debugId
		}`;
		const valid = alreadyValidatedParents.get(parentValidationKey);
		if (valid === false) return;
		if (valid === undefined) {
			// Module can only be moved into the existing chunk if the existing chunk
			// is a parent of all selected chunks
			let isInAllParents = true;
			/** @type {Set<ChunkGroup>} */
			const queue = new Set();
			for (const chunk of selectedChunks) {
				for (const group of chunk.groupsIterable) {
					queue.add(group);
				}
			}
			// Walk upwards through all parent chunk groups; reaching a root
			// group that doesn't contain the existing chunk makes it invalid.
			// (Iterating a Set while adding to it visits the added entries.)
			for (const group of queue) {
				if (existingChunk.isInGroup(group)) continue;
				let hasParent = false;
				for (const parent of group.parentsIterable) {
					hasParent = true;
					queue.add(parent);
				}
				if (!hasParent) {
					isInAllParents = false;
				}
			}
			// renamed from `valid` to avoid shadowing the cached lookup above
			const isValid = isInAllParents;
			alreadyValidatedParents.set(parentValidationKey, isValid);
			if (!isValid) {
				if (!alreadyReportedErrors.has(name)) {
					alreadyReportedErrors.add(name);
					compilation.errors.push(
						new WebpackError(
							"SplitChunksPlugin\n" +
								`Cache group "${cacheGroup.key}" conflicts with existing chunk.\n` +
								`Both have the same name "${name}" and existing chunk is not a parent of the selected modules.\n` +
								"Use a different name for the cache group or make sure that the existing chunk is a parent (e. g. via dependOn).\n" +
								'HINT: You can omit "name" to automatically create a name.\n' +
								"BREAKING CHANGE: webpack < 5 used to allow to use an entrypoint as splitChunk. " +
								"This is no longer allowed when the entrypoint is not a parent of the selected modules.\n" +
								"Remove this entrypoint and add modules to cache group's 'test' instead. " +
								"If you need modules to be evaluated on startup, add them to the existing entrypoints (make them arrays). " +
								"See migration guide for more info."
						)
					);
				}
				return;
			}
		}
	}
	// Create key for maps
	// When it has a name we use the name as key
	// Otherwise we create the key from chunks and cache group key
	// This automatically merges equal names
	const key =
		cacheGroup.key +
		(name
			? ` name:${name}`
			: ` chunks:${keyToString(selectedChunksKey)}`);
	// Add module to maps
	let info = chunksInfoMap.get(key);
	if (info === undefined) {
		chunksInfoMap.set(
			key,
			(info = {
				modules: new SortableSet(
					undefined,
					compareModulesByIdentifier
				),
				cacheGroup,
				cacheGroupIndex,
				name,
				sizes: {},
				chunks: new Set(),
				reuseableChunks: new Set(),
				chunksKeys: new Set()
			})
		);
	}
	const oldSize = info.modules.size;
	info.modules.add(module);
	// Only accumulate sizes when the module was actually added (not a duplicate)
	if (info.modules.size !== oldSize) {
		for (const type of module.getSourceTypes()) {
			info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
		}
	}
	const oldChunksKeysSize = info.chunksKeys.size;
	info.chunksKeys.add(selectedChunksKey);
	// New chunk selection => merge its chunks into the item's chunk set
	if (oldChunksKeysSize !== info.chunksKeys.size) {
		for (const chunk of selectedChunks) {
			info.chunks.add(chunk);
		}
	}
};
1176
// Shared context passed to user-provided getCacheGroups callbacks
const context = {
	moduleGraph,
	chunkGraph
};

logger.timeEnd("prepare");

logger.time("modules");

// Walk through all modules
for (const module of compilation.modules) {
	// Get cache group
	let cacheGroups = this.options.getCacheGroups(module, context);
	if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) {
		continue;
	}

	// Prepare some values (usedExports = false)
	// Memoized per module: combinations are only computed if some cache
	// group actually needs them.
	const getCombs = memoize(() => {
		const chunks = chunkGraph.getModuleChunksIterable(module);
		const chunksKey = getKey(chunks);
		return getCombinations(chunksKey);
	});

	// Prepare some values (usedExports = true)
	const getCombsByUsedExports = memoize(() => {
		// fill the groupedByExportsMap
		// NOTE: called for its side effect only — it populates the map
		// read below
		getExportsChunkSetsInGraph();
		/** @type {Set<Set<Chunk> | Chunk>} */
		const set = new Set();
		const groupedByUsedExports = groupedByExportsMap.get(module);
		for (const chunks of groupedByUsedExports) {
			const chunksKey = getKey(chunks);
			for (const comb of getExportsCombinations(chunksKey))
				set.add(comb);
		}
		return set;
	});

	let cacheGroupIndex = 0;
	for (const cacheGroupSource of cacheGroups) {
		const cacheGroup = this._getCacheGroup(cacheGroupSource);

		const combs = cacheGroup.usedExports
			? getCombsByUsedExports()
			: getCombs();
		// For all combination of chunk selection
		for (const chunkCombination of combs) {
			// Break if minimum number of chunks is not reached
			// (a bare Chunk counts as a selection of size 1)
			const count =
				chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
			if (count < cacheGroup.minChunks) continue;
			// Select chunks by configuration
			const { chunks: selectedChunks, key: selectedChunksKey } =
				getSelectedChunks(chunkCombination, cacheGroup.chunksFilter);

			addModuleToChunksInfoMap(
				cacheGroup,
				cacheGroupIndex,
				selectedChunks,
				selectedChunksKey,
				module
			);
		}
		cacheGroupIndex++;
	}
}

logger.timeEnd("modules");

logger.time("queue");
1248
/**
 * Removes every module providing at least one of the given source types
 * from the entry, subtracting all of the removed module's type sizes.
 * @param {ChunksInfoItem} info entry
 * @param {string[]} sourceTypes source types to be removed
 */
const removeModulesWithSourceType = (info, sourceTypes) => {
	for (const module of info.modules) {
		const types = module.getSourceTypes();
		const matches = sourceTypes.some(type => types.has(type));
		if (!matches) continue;
		// deleting while iterating a Set is safe in JS
		info.modules.delete(module);
		// all of the module's types shrink, not only the matching ones
		for (const type of types) {
			info.sizes[type] -= module.size(type);
		}
	}
};
1264
/**
 * Drops modules whose source types violate the cache group's minSize.
 * @param {ChunksInfoItem} info entry
 * @returns {boolean} true, if entry become empty
 */
const removeMinSizeViolatingModules = info => {
	const { cacheGroup } = info;
	if (!cacheGroup._validateSize) return false;
	const violating = getViolatingMinSizes(info.sizes, cacheGroup.minSize);
	if (violating === undefined) return false;
	removeModulesWithSourceType(info, violating);
	return info.modules.size === 0;
};
1279
// Filter items where size < minSize or the size reduction is insufficient
// (short-circuit: the reduction check only runs when the entry survived
// the minSize check, matching the original if/else-if ordering)
for (const [key, info] of chunksInfoMap) {
	if (
		removeMinSizeViolatingModules(info) ||
		!checkMinSizeReduction(
			info.sizes,
			info.cacheGroup.minSizeReduction,
			info.chunks.size
		)
	) {
		chunksInfoMap.delete(key);
	}
}
1294
/**
 * @typedef {Object} MaxSizeQueueItem
 * @property {SplitChunksSizes} minSize
 * @property {SplitChunksSizes} maxAsyncSize
 * @property {SplitChunksSizes} maxInitialSize
 * @property {string} automaticNameDelimiter
 * @property {string[]} keys
 */

// Per-chunk maxSize settings accumulated while creating split chunks;
// consumed by the "maxSize" phase below
/** @type {Map<Chunk, MaxSizeQueueItem>} */
const maxSizeQueueMap = new Map();

// Process candidates best-first until the queue is drained. Processing an
// entry may re-queue it or invalidate others, so the map is rescanned
// every iteration.
while (chunksInfoMap.size > 0) {
	// Find best matching entry
	let bestEntryKey;
	let bestEntry;
	for (const pair of chunksInfoMap) {
		const key = pair[0];
		const info = pair[1];
		if (
			bestEntry === undefined ||
			compareEntries(bestEntry, info) < 0
		) {
			bestEntry = info;
			bestEntryKey = key;
		}
	}

	const item = bestEntry;
	chunksInfoMap.delete(bestEntryKey);

	let chunkName = item.name;
	// Variable for the new chunk (lazy created)
	/** @type {Chunk} */
	let newChunk;
	// When no chunk name, check if we can reuse a chunk instead of creating a new one
	let isExistingChunk = false;
	let isReusedWithAllModules = false;
	if (chunkName) {
		const chunkByName = compilation.namedChunks.get(chunkName);
		if (chunkByName !== undefined) {
			newChunk = chunkByName;
			const oldSize = item.chunks.size;
			item.chunks.delete(newChunk);
			isExistingChunk = item.chunks.size !== oldSize;
		}
	} else if (item.cacheGroup.reuseExistingChunk) {
		// A chunk is reusable when it contains exactly the item's modules;
		// ties are broken by shortest, then lexicographically smallest name
		outer: for (const chunk of item.chunks) {
			if (
				chunkGraph.getNumberOfChunkModules(chunk) !==
				item.modules.size
			) {
				continue;
			}
			if (
				item.chunks.size > 1 &&
				chunkGraph.getNumberOfEntryModules(chunk) > 0
			) {
				continue;
			}
			for (const module of item.modules) {
				if (!chunkGraph.isModuleInChunk(module, chunk)) {
					continue outer;
				}
			}
			if (!newChunk || !newChunk.name) {
				newChunk = chunk;
			} else if (
				chunk.name &&
				chunk.name.length < newChunk.name.length
			) {
				newChunk = chunk;
			} else if (
				chunk.name &&
				chunk.name.length === newChunk.name.length &&
				chunk.name < newChunk.name
			) {
				newChunk = chunk;
			}
		}
		if (newChunk) {
			item.chunks.delete(newChunk);
			chunkName = undefined;
			isExistingChunk = true;
			isReusedWithAllModules = true;
		}
	}

	// enforceSizeThreshold lifts the request/size constraints below
	const enforced =
		item.cacheGroup._conditionalEnforce &&
		checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);

	const usedChunks = new Set(item.chunks);

	// Check if maxRequests condition can be fulfilled
	if (
		!enforced &&
		(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
			Number.isFinite(item.cacheGroup.maxAsyncRequests))
	) {
		for (const chunk of usedChunks) {
			// respect max requests
			const maxRequests = chunk.isOnlyInitial()
				? item.cacheGroup.maxInitialRequests
				: chunk.canBeInitial()
				? Math.min(
						item.cacheGroup.maxInitialRequests,
						item.cacheGroup.maxAsyncRequests
				  )
				: item.cacheGroup.maxAsyncRequests;
			if (
				isFinite(maxRequests) &&
				getRequests(chunk) >= maxRequests
			) {
				usedChunks.delete(chunk);
			}
		}
	}

	// Drop chunks that no longer contain any of the item's modules
	outer: for (const chunk of usedChunks) {
		for (const module of item.modules) {
			if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
		}
		usedChunks.delete(chunk);
	}

	// Were some (invalid) chunks removed from usedChunks?
	// => readd all modules to the queue, as things could have been changed
	if (usedChunks.size < item.chunks.size) {
		if (isExistingChunk) usedChunks.add(newChunk);
		if (usedChunks.size >= item.cacheGroup.minChunks) {
			const chunksArr = Array.from(usedChunks);
			for (const module of item.modules) {
				addModuleToChunksInfoMap(
					item.cacheGroup,
					item.cacheGroupIndex,
					chunksArr,
					getKey(usedChunks),
					module
				);
			}
		}
		continue;
	}

	// Validate minRemainingSize constraint when a single chunk is left over
	if (
		!enforced &&
		item.cacheGroup._validateRemainingSize &&
		usedChunks.size === 1
	) {
		const [chunk] = usedChunks;
		// sizes of the modules that would remain in the source chunk
		let chunkSizes = Object.create(null);
		for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
			if (!item.modules.has(module)) {
				for (const type of module.getSourceTypes()) {
					chunkSizes[type] =
						(chunkSizes[type] || 0) + module.size(type);
				}
			}
		}
		const violatingSizes = getViolatingMinSizes(
			chunkSizes,
			item.cacheGroup.minRemainingSize
		);
		if (violatingSizes !== undefined) {
			const oldModulesSize = item.modules.size;
			removeModulesWithSourceType(item, violatingSizes);
			if (
				item.modules.size > 0 &&
				item.modules.size !== oldModulesSize
			) {
				// queue this item again to be processed again
				// without violating modules
				chunksInfoMap.set(bestEntryKey, item);
			}
			continue;
		}
	}

	// Create the new chunk if not reusing one
	if (newChunk === undefined) {
		newChunk = compilation.addChunk(chunkName);
	}
	// Walk through all chunks
	for (const chunk of usedChunks) {
		// Add graph connections for splitted chunk
		chunk.split(newChunk);
	}

	// Add a note to the chunk
	newChunk.chunkReason =
		(newChunk.chunkReason ? newChunk.chunkReason + ", " : "") +
		(isReusedWithAllModules
			? "reused as split chunk"
			: "split chunk");
	if (item.cacheGroup.key) {
		newChunk.chunkReason += ` (cache group: ${item.cacheGroup.key})`;
	}
	if (chunkName) {
		newChunk.chunkReason += ` (name: ${chunkName})`;
	}
	if (item.cacheGroup.filename) {
		newChunk.filenameTemplate = item.cacheGroup.filename;
	}
	if (item.cacheGroup.idHint) {
		newChunk.idNameHints.add(item.cacheGroup.idHint);
	}
	if (!isReusedWithAllModules) {
		// Add all modules to the new chunk
		for (const module of item.modules) {
			if (!module.chunkCondition(newChunk, compilation)) continue;
			// Add module to new chunk
			chunkGraph.connectChunkAndModule(newChunk, module);
			// Remove module from used chunks
			for (const chunk of usedChunks) {
				chunkGraph.disconnectChunkAndModule(chunk, module);
			}
		}
	} else {
		// Remove all modules from used chunks
		// (the reused chunk already contains exactly these modules)
		for (const module of item.modules) {
			for (const chunk of usedChunks) {
				chunkGraph.disconnectChunkAndModule(chunk, module);
			}
		}
	}

	// Record maxSize settings for the later "maxSize" phase, combining
	// with settings from previous cache groups hitting the same chunk
	if (
		Object.keys(item.cacheGroup.maxAsyncSize).length > 0 ||
		Object.keys(item.cacheGroup.maxInitialSize).length > 0
	) {
		const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk);
		maxSizeQueueMap.set(newChunk, {
			minSize: oldMaxSizeSettings
				? combineSizes(
						oldMaxSizeSettings.minSize,
						item.cacheGroup._minSizeForMaxSize,
						Math.max
				  )
				: item.cacheGroup.minSize,
			maxAsyncSize: oldMaxSizeSettings
				? combineSizes(
						oldMaxSizeSettings.maxAsyncSize,
						item.cacheGroup.maxAsyncSize,
						Math.min
				  )
				: item.cacheGroup.maxAsyncSize,
			maxInitialSize: oldMaxSizeSettings
				? combineSizes(
						oldMaxSizeSettings.maxInitialSize,
						item.cacheGroup.maxInitialSize,
						Math.min
				  )
				: item.cacheGroup.maxInitialSize,
			automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
			keys: oldMaxSizeSettings
				? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
				: [item.cacheGroup.key]
		});
	}

	// remove all modules from other entries and update size
	for (const [key, info] of chunksInfoMap) {
		if (isOverlap(info.chunks, usedChunks)) {
			// update modules and total size
			// may remove it from the map when < minSize
			let updated = false;
			for (const module of item.modules) {
				if (info.modules.has(module)) {
					// remove module
					info.modules.delete(module);
					// update size
					// NOTE: this inner `key` (a source type) shadows the
					// outer map key above
					for (const key of module.getSourceTypes()) {
						info.sizes[key] -= module.size(key);
					}
					updated = true;
				}
			}
			if (updated) {
				if (info.modules.size === 0) {
					chunksInfoMap.delete(key);
					continue;
				}
				if (
					removeMinSizeViolatingModules(info) ||
					!checkMinSizeReduction(
						info.sizes,
						info.cacheGroup.minSizeReduction,
						info.chunks.size
					)
				) {
					chunksInfoMap.delete(key);
					continue;
				}
			}
		}
	}
}
1594
logger.timeEnd("queue");

logger.time("maxSize");

// Deduplicates min/max size mismatch warnings per key combination
/** @type {Set<string>} */
const incorrectMinMaxSizeSet = new Set();

const { outputOptions } = compilation;

// Make sure that maxSize is fulfilled
// Chunks exceeding maxSize are deterministically regrouped into parts.
const { fallbackCacheGroup } = this.options;
for (const chunk of Array.from(compilation.chunks)) {
	const chunkConfig = maxSizeQueueMap.get(chunk);
	const {
		minSize,
		maxAsyncSize,
		maxInitialSize,
		automaticNameDelimiter
	} = chunkConfig || fallbackCacheGroup;
	if (!chunkConfig && !fallbackCacheGroup.chunksFilter(chunk))
		continue;
	// Pick the applicable maxSize based on how the chunk can be loaded
	/** @type {SplitChunksSizes} */
	let maxSize;
	if (chunk.isOnlyInitial()) {
		maxSize = maxInitialSize;
	} else if (chunk.canBeInitial()) {
		maxSize = combineSizes(maxAsyncSize, maxInitialSize, Math.min);
	} else {
		maxSize = maxAsyncSize;
	}
	if (Object.keys(maxSize).length === 0) {
		continue;
	}
	// Warn once per key combination when minSize > maxSize
	for (const key of Object.keys(maxSize)) {
		const maxSizeValue = maxSize[key];
		const minSizeValue = minSize[key];
		if (
			typeof minSizeValue === "number" &&
			minSizeValue > maxSizeValue
		) {
			const keys = chunkConfig && chunkConfig.keys;
			const warningKey = `${
				keys && keys.join()
			} ${minSizeValue} ${maxSizeValue}`;
			if (!incorrectMinMaxSizeSet.has(warningKey)) {
				incorrectMinMaxSizeSet.add(warningKey);
				compilation.warnings.push(
					new MinMaxSizeWarning(keys, minSizeValue, maxSizeValue)
				);
			}
		}
	}
	const results = deterministicGroupingForModules({
		minSize,
		// clamp maxSize up to minSize so the grouping constraints are consistent
		maxSize: mapObject(maxSize, (value, key) => {
			const minSizeValue = minSize[key];
			return typeof minSizeValue === "number"
				? Math.max(value, minSizeValue)
				: value;
		}),
		items: chunkGraph.getChunkModulesIterable(chunk),
		// stable, path-based key per module (cached across chunks)
		getKey(module) {
			const cache = getKeyCache.get(module);
			if (cache !== undefined) return cache;
			const ident = cachedMakePathsRelative(module.identifier());
			const nameForCondition =
				module.nameForCondition && module.nameForCondition();
			const name = nameForCondition
				? cachedMakePathsRelative(nameForCondition)
				: ident.replace(/^.*!|\?[^?!]*$/g, "");
			const fullKey =
				name +
				automaticNameDelimiter +
				hashFilename(ident, outputOptions);
			const key = requestToId(fullKey);
			getKeyCache.set(module, key);
			return key;
		},
		getSize(module) {
			const size = Object.create(null);
			for (const key of module.getSourceTypes()) {
				size[key] = module.size(key);
			}
			return size;
		}
	});
	// A single group means the chunk stays as-is
	if (results.length <= 1) {
		continue;
	}
	for (let i = 0; i < results.length; i++) {
		const group = results[i];
		const key = this.options.hidePathInfo
			? hashFilename(group.key, outputOptions)
			: group.key;
		let name = chunk.name
			? chunk.name + automaticNameDelimiter + key
			: null;
		// Keep generated names bounded; overly long names are truncated
		// and disambiguated with a hash
		if (name && name.length > 100) {
			name =
				name.slice(0, 100) +
				automaticNameDelimiter +
				hashFilename(name, outputOptions);
		}
		// All groups but the last become new chunks; the last reuses the
		// original chunk (renamed)
		if (i !== results.length - 1) {
			const newPart = compilation.addChunk(name);
			chunk.split(newPart);
			newPart.chunkReason = chunk.chunkReason;
			// Add all modules to the new chunk
			for (const module of group.items) {
				if (!module.chunkCondition(newPart, compilation)) {
					continue;
				}
				// Add module to new chunk
				chunkGraph.connectChunkAndModule(newPart, module);
				// Remove module from used chunks
				chunkGraph.disconnectChunkAndModule(chunk, module);
			}
		} else {
			// change the chunk to be a part
			chunk.name = name;
		}
	}
}
logger.timeEnd("maxSize");
1719 }
1720 );
1721 });
1722 }
1723};