// Retrieved from UNPKG (raw file view) — webpack SplitChunksPlugin source, 54.3 kB JavaScript.
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/
5
6"use strict";
7
8const Chunk = require("../Chunk");
9const { STAGE_ADVANCED } = require("../OptimizationStages");
10const WebpackError = require("../WebpackError");
11const { requestToId } = require("../ids/IdHelpers");
12const { isSubset } = require("../util/SetHelpers");
13const SortableSet = require("../util/SortableSet");
14const {
15 compareModulesByIdentifier,
16 compareIterables
17} = require("../util/comparators");
18const createHash = require("../util/createHash");
19const deterministicGrouping = require("../util/deterministicGrouping");
20const { makePathsRelative } = require("../util/identifier");
21const memoize = require("../util/memoize");
22const MinMaxSizeWarning = require("./MinMaxSizeWarning");
23
24/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksCacheGroup} OptimizationSplitChunksCacheGroup */
25/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksGetCacheGroups} OptimizationSplitChunksGetCacheGroups */
26/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
27/** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
28/** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
29/** @typedef {import("../ChunkGraph")} ChunkGraph */
30/** @typedef {import("../ChunkGroup")} ChunkGroup */
31/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
32/** @typedef {import("../Compilation").PathData} PathData */
33/** @typedef {import("../Compiler")} Compiler */
34/** @typedef {import("../Module")} Module */
35/** @typedef {import("../ModuleGraph")} ModuleGraph */
36/** @typedef {import("../util/deterministicGrouping").GroupedItems<Module>} DeterministicGroupingGroupedItemsForModule */
37/** @typedef {import("../util/deterministicGrouping").Options<Module>} DeterministicGroupingOptionsForModule */
38
39/** @typedef {Record<string, number>} SplitChunksSizes */
40
41/**
42 * @callback ChunkFilterFunction
43 * @param {Chunk} chunk
44 * @returns {boolean}
45 */
46
47/**
48 * @callback CombineSizeFunction
49 * @param {number} a
50 * @param {number} b
51 * @returns {number}
52 */
53
54/**
55 * @typedef {Object} CacheGroupSource
56 * @property {string=} key
57 * @property {number=} priority
58 * @property {GetName=} getName
59 * @property {ChunkFilterFunction=} chunksFilter
60 * @property {boolean=} enforce
61 * @property {SplitChunksSizes} minSize
62 * @property {SplitChunksSizes} minSizeReduction
63 * @property {SplitChunksSizes} minRemainingSize
64 * @property {SplitChunksSizes} enforceSizeThreshold
65 * @property {SplitChunksSizes} maxAsyncSize
66 * @property {SplitChunksSizes} maxInitialSize
67 * @property {number=} minChunks
68 * @property {number=} maxAsyncRequests
69 * @property {number=} maxInitialRequests
70 * @property {(string | function(PathData, AssetInfo=): string)=} filename
71 * @property {string=} idHint
72 * @property {string} automaticNameDelimiter
73 * @property {boolean=} reuseExistingChunk
74 * @property {boolean=} usedExports
75 */
76
77/**
78 * @typedef {Object} CacheGroup
79 * @property {string} key
80 * @property {number=} priority
81 * @property {GetName=} getName
82 * @property {ChunkFilterFunction=} chunksFilter
83 * @property {SplitChunksSizes} minSize
84 * @property {SplitChunksSizes} minSizeReduction
85 * @property {SplitChunksSizes} minRemainingSize
86 * @property {SplitChunksSizes} enforceSizeThreshold
87 * @property {SplitChunksSizes} maxAsyncSize
88 * @property {SplitChunksSizes} maxInitialSize
89 * @property {number=} minChunks
90 * @property {number=} maxAsyncRequests
91 * @property {number=} maxInitialRequests
92 * @property {(string | function(PathData, AssetInfo=): string)=} filename
93 * @property {string=} idHint
94 * @property {string} automaticNameDelimiter
95 * @property {boolean} reuseExistingChunk
96 * @property {boolean} usedExports
97 * @property {boolean} _validateSize
98 * @property {boolean} _validateRemainingSize
99 * @property {SplitChunksSizes} _minSizeForMaxSize
100 * @property {boolean} _conditionalEnforce
101 */
102
103/**
104 * @typedef {Object} FallbackCacheGroup
105 * @property {SplitChunksSizes} minSize
106 * @property {SplitChunksSizes} maxAsyncSize
107 * @property {SplitChunksSizes} maxInitialSize
108 * @property {string} automaticNameDelimiter
109 */
110
111/**
112 * @typedef {Object} CacheGroupsContext
113 * @property {ModuleGraph} moduleGraph
114 * @property {ChunkGraph} chunkGraph
115 */
116
117/**
118 * @callback GetCacheGroups
119 * @param {Module} module
120 * @param {CacheGroupsContext} context
121 * @returns {CacheGroupSource[]}
122 */
123
124/**
125 * @callback GetName
126 * @param {Module=} module
127 * @param {Chunk[]=} chunks
128 * @param {string=} key
129 * @returns {string=}
130 */
131
132/**
133 * @typedef {Object} SplitChunksOptions
134 * @property {ChunkFilterFunction} chunksFilter
135 * @property {string[]} defaultSizeTypes
136 * @property {SplitChunksSizes} minSize
137 * @property {SplitChunksSizes} minSizeReduction
138 * @property {SplitChunksSizes} minRemainingSize
139 * @property {SplitChunksSizes} enforceSizeThreshold
140 * @property {SplitChunksSizes} maxInitialSize
141 * @property {SplitChunksSizes} maxAsyncSize
142 * @property {number} minChunks
143 * @property {number} maxAsyncRequests
144 * @property {number} maxInitialRequests
145 * @property {boolean} hidePathInfo
146 * @property {string | function(PathData, AssetInfo=): string} filename
147 * @property {string} automaticNameDelimiter
148 * @property {GetCacheGroups} getCacheGroups
149 * @property {GetName} getName
150 * @property {boolean} usedExports
151 * @property {FallbackCacheGroup} fallbackCacheGroup
152 */
153
154/**
155 * @typedef {Object} ChunksInfoItem
156 * @property {SortableSet<Module>} modules
157 * @property {CacheGroup} cacheGroup
158 * @property {number} cacheGroupIndex
159 * @property {string} name
160 * @property {Record<string, number>} sizes
161 * @property {Set<Chunk>} chunks
162 * @property {Set<Chunk>} reuseableChunks
163 * @property {Set<bigint | Chunk>} chunksKeys
164 */
165
// Default `name` getter: returns no name, so a chunk only gets a name when a
// cache group supplies its own getName.
const defaultGetName = /** @type {GetName} */ (() => {});

// deterministicGrouping specialized for Module items (pure type narrowing).
const deterministicGroupingForModules =
	/** @type {function(DeterministicGroupingOptionsForModule): DeterministicGroupingGroupedItemsForModule[]} */ (
		deterministicGrouping
	);

// Per-module key cache; populated/used further down the file (keyed by module
// identity, so entries are collected with their modules).
/** @type {WeakMap<Module, string>} */
const getKeyCache = new WeakMap();
175
/**
 * Hashes a filename with the configured hash function/digest and keeps
 * only the first 8 characters of the digest.
 * @param {string} name a filename to hash
 * @param {OutputOptions} outputOptions output options providing hashFunction and hashDigest
 * @returns {string} hashed filename (8 characters)
 */
const hashFilename = (name, outputOptions) => {
	const hash = createHash(outputOptions.hashFunction);
	hash.update(name);
	const digest = /** @type {string} */ (hash.digest(outputOptions.hashDigest));
	return digest.slice(0, 8);
};
189
/**
 * Computes the request count of a chunk: the maximum number of chunks in
 * any of the chunk groups this chunk belongs to.
 * @param {Chunk} chunk the chunk
 * @returns {number} the number of requests
 */
const getRequests = chunk => {
	let maxRequests = 0;
	for (const group of chunk.groupsIterable) {
		const count = group.chunks.length;
		if (count > maxRequests) maxRequests = count;
	}
	return maxRequests;
};
201
/**
 * Maps the own enumerable values of an object into a fresh null-prototype
 * object with the same keys.
 * @param {Object} obj the source object
 * @param {function(any, string): any} fn mapper receiving (value, key)
 * @returns {Object} new object with mapped values
 */
const mapObject = (obj, fn) => {
	const result = Object.create(null);
	for (const [key, value] of Object.entries(obj)) {
		result[key] = fn(value, key);
	}
	return result;
};
209
/**
 * Checks whether two sets share at least one element.
 * @template T
 * @param {Set<T>} a set
 * @param {Set<T>} b other set
 * @returns {boolean} true if at least one item of a is in b
 */
const isOverlap = (a, b) => {
	// Overlap is symmetric, so iterate the smaller set; `Set#has` is O(1),
	// making this O(min(|a|, |b|)) instead of always O(|a|).
	const [small, big] = a.size <= b.size ? [a, b] : [b, a];
	for (const item of small) {
		if (big.has(item)) return true;
	}
	return false;
};
222
const compareModuleIterables = compareIterables(compareModulesByIdentifier);

/**
 * Total order over chunks-info entries used to pick the best entry.
 * Ties break in order: priority, chunk count, total size reduction,
 * cache group index (reversed), module count, module identifiers.
 * @param {ChunksInfoItem} a item
 * @param {ChunksInfoItem} b item
 * @returns {number} compare result
 */
const compareEntries = (a, b) => {
	// 1. by priority
	const priorityDiff = a.cacheGroup.priority - b.cacheGroup.priority;
	if (priorityDiff) return priorityDiff;
	// 2. by number of chunks
	const chunkCountDiff = a.chunks.size - b.chunks.size;
	if (chunkCountDiff) return chunkCountDiff;
	// 3. by size reduction (bytes saved by extracting from all but one chunk)
	const sizeReduceDiff =
		totalSize(a.sizes) * (a.chunks.size - 1) -
		totalSize(b.sizes) * (b.chunks.size - 1);
	if (sizeReduceDiff) return sizeReduceDiff;
	// 4. by cache group index (reversed: lower index compares greater)
	const indexDiff = b.cacheGroupIndex - a.cacheGroupIndex;
	if (indexDiff) return indexDiff;
	// 5. by number of modules (to be able to compare by identifier)
	const moduleCountDiff = a.modules.size - b.modules.size;
	if (moduleCountDiff) return moduleCountDiff;
	// 6. by module identifiers (sort both sets for a stable iteration order)
	a.modules.sort();
	b.modules.sort();
	return compareModuleIterables(a.modules, b.modules);
};
255
// Predefined chunk filters for the `chunks` option values
// "initial", "async" and "all" respectively.
const INITIAL_CHUNK_FILTER = chunk => chunk.canBeInitial();
const ASYNC_CHUNK_FILTER = chunk => !chunk.canBeInitial();
const ALL_CHUNK_FILTER = chunk => true;
259
/**
 * Normalizes a size option into a per-size-type record: a number applies
 * to every default size type, an object is shallow-copied, and any other
 * value yields an empty record.
 * @param {OptimizationSplitChunksSizes} value the sizes
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {SplitChunksSizes} normalized representation
 */
const normalizeSizes = (value, defaultSizeTypes) => {
	if (typeof value === "number") {
		/** @type {Record<string, number>} */
		const sizes = {};
		for (const type of defaultSizeTypes) {
			sizes[type] = value;
		}
		return sizes;
	}
	if (typeof value === "object" && value !== null) {
		return { ...value };
	}
	return {};
};
277
/**
 * Merges size records; earlier arguments take precedence over later ones.
 * `undefined` arguments are skipped.
 * @param {...SplitChunksSizes} sizes the sizes
 * @returns {SplitChunksSizes} the merged sizes
 */
const mergeSizes = (...sizes) => {
	// Fold from the last argument to the first, so keys from earlier
	// arguments overwrite keys from later ones.
	return sizes.reduceRight(
		(merged, size) => Object.assign(merged, size),
		/** @type {SplitChunksSizes} */ ({})
	);
};
290
/**
 * Checks whether any size type has a value greater than zero.
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {boolean} true, if there are sizes > 0
 */
const hasNonZeroSizes = sizes => Object.values(sizes).some(size => size > 0);
301
/**
 * Combines two size records key-wise: keys present in both records are
 * combined with `combine`, keys present in only one record are copied
 * unchanged.
 * @param {SplitChunksSizes} a first sizes
 * @param {SplitChunksSizes} b second sizes
 * @param {CombineSizeFunction} combine a function to combine sizes
 * @returns {SplitChunksSizes} the combined sizes
 */
const combineSizes = (a, b, combine) => {
	const remainingB = new Set(Object.keys(b));
	/** @type {SplitChunksSizes} */
	const result = {};
	for (const [key, value] of Object.entries(a)) {
		result[key] = remainingB.has(key) ? combine(value, b[key]) : value;
		remainingB.delete(key);
	}
	// Whatever is left in remainingB exists only in b
	for (const key of remainingB) {
		result[key] = b[key];
	}
	return result;
};
327
/**
 * Checks that every size type that is present (and non-zero) in `sizes`
 * meets the configured minimum; missing or zero sizes are skipped.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSize the min sizes
 * @returns {boolean} true if all existing sizes are at least `minSize`
 */
const checkMinSize = (sizes, minSize) => {
	for (const [key, min] of Object.entries(minSize)) {
		const size = sizes[key];
		if (size === undefined || size === 0) continue;
		if (size < min) return false;
	}
	return true;
};
341
/**
 * Checks that removing the modules from all `chunkCount` chunks would save
 * at least `minSizeReduction` per size type; missing or zero sizes are
 * skipped.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSizeReduction the min size reductions
 * @param {number} chunkCount number of chunks
 * @returns {boolean} true if all existing sizes reach `minSizeReduction`
 */
const checkMinSizeReduction = (sizes, minSizeReduction, chunkCount) => {
	for (const [key, min] of Object.entries(minSizeReduction)) {
		const size = sizes[key];
		if (size === undefined || size === 0) continue;
		// Extracting from `chunkCount` chunks saves `size` in each of them
		if (size * chunkCount < min) return false;
	}
	return true;
};
356
/**
 * Collects the size types whose (present, non-zero) size is below the
 * configured minimum.
 * @param {SplitChunksSizes} sizes the sizes
 * @param {SplitChunksSizes} minSize the min sizes
 * @returns {undefined | string[]} list of violating size types, or undefined when none
 */
const getViolatingMinSizes = (sizes, minSize) => {
	/** @type {undefined | string[]} */
	let violations;
	for (const [key, min] of Object.entries(minSize)) {
		const size = sizes[key];
		if (size === undefined || size === 0) continue;
		if (size < min) {
			if (violations === undefined) violations = [];
			violations.push(key);
		}
	}
	return violations;
};
374
/**
 * Sums all size-type values of a size record.
 * @param {SplitChunksSizes} sizes the sizes
 * @returns {number} the total size
 */
const totalSize = sizes =>
	Object.values(sizes).reduce((sum, size) => sum + size, 0);
386
/**
 * Normalizes the `name` option into a GetName function: strings become a
 * constant function, functions pass through, other values (e.g. `false`)
 * yield `undefined`.
 * @param {false|string|Function} name the chunk name
 * @returns {GetName} a function to get the name of the chunk
 */
const normalizeName = name => {
	switch (typeof name) {
		case "string":
			return () => name;
		case "function":
			return /** @type {GetName} */ (name);
		default:
			return undefined;
	}
};
399
/**
 * Normalizes the `chunks` option into a chunk filter function. The string
 * shorthands map to the shared predefined filters; a function passes
 * through; any other value yields `undefined`.
 * @param {OptimizationSplitChunksCacheGroup["chunks"]} chunks the chunk filter option
 * @returns {ChunkFilterFunction} the chunk filter function
 */
const normalizeChunksFilter = chunks => {
	switch (chunks) {
		case "initial":
			return INITIAL_CHUNK_FILTER;
		case "async":
			return ASYNC_CHUNK_FILTER;
		case "all":
			return ALL_CHUNK_FILTER;
		default:
			return typeof chunks === "function" ? chunks : undefined;
	}
};
418
/**
 * Normalizes the `cacheGroups` option into a single function that returns
 * the matching cache group sources for a module. A function option passes
 * through unchanged; an object is compiled into one matcher ("handler")
 * per entry; anything else yields a function returning null.
 * @param {GetCacheGroups | Record<string, false|string|RegExp|OptimizationSplitChunksGetCacheGroups|OptimizationSplitChunksCacheGroup>} cacheGroups the cache group options
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {GetCacheGroups} a function to get the cache groups
 */
const normalizeCacheGroups = (cacheGroups, defaultSizeTypes) => {
	if (typeof cacheGroups === "function") {
		return cacheGroups;
	}
	if (typeof cacheGroups === "object" && cacheGroups !== null) {
		/** @type {(function(Module, CacheGroupsContext, CacheGroupSource[]): void)[]} */
		const handlers = [];
		for (const key of Object.keys(cacheGroups)) {
			const option = cacheGroups[key];
			if (option === false) {
				// `false` removes the cache group entirely
				continue;
			}
			if (typeof option === "string" || option instanceof RegExp) {
				// Shorthand: the option value itself is the `test` condition,
				// all other cache group options stay at their defaults
				const source = createCacheGroupSource({}, key, defaultSizeTypes);
				handlers.push((module, context, results) => {
					if (checkTest(option, module, context)) {
						results.push(source);
					}
				});
			} else if (typeof option === "function") {
				// Dynamic cache group: the function may return one group or an
				// array of groups per module; sources are memoized per returned
				// group object so identical groups share one source
				const cache = new WeakMap();
				handlers.push((module, context, results) => {
					const result = option(module);
					if (result) {
						const groups = Array.isArray(result) ? result : [result];
						for (const group of groups) {
							const cachedSource = cache.get(group);
							if (cachedSource !== undefined) {
								results.push(cachedSource);
							} else {
								const source = createCacheGroupSource(
									group,
									key,
									defaultSizeTypes
								);
								cache.set(group, source);
								results.push(source);
							}
						}
					}
				});
			} else {
				// Plain options object: module must match test, type and layer
				const source = createCacheGroupSource(option, key, defaultSizeTypes);
				handlers.push((module, context, results) => {
					if (
						checkTest(option.test, module, context) &&
						checkModuleType(option.type, module) &&
						checkModuleLayer(option.layer, module)
					) {
						results.push(source);
					}
				});
			}
		}
		/**
		 * Runs every handler in declaration order and collects the sources.
		 * @param {Module} module the current module
		 * @param {CacheGroupsContext} context the current context
		 * @returns {CacheGroupSource[]} the matching cache groups
		 */
		const fn = (module, context) => {
			/** @type {CacheGroupSource[]} */
			let results = [];
			for (const fn of handlers) {
				fn(module, context, results);
			}
			return results;
		};
		return fn;
	}
	return () => null;
};
495
/**
 * Tests whether a module matches a cache group's `test` option.
 * For string/RegExp tests the raw `name && …` result is returned, which
 * may be `null`/`""` rather than `false` (still falsy for callers).
 * @param {undefined|boolean|string|RegExp|Function} test test option
 * @param {Module} module the module
 * @param {CacheGroupsContext} context context object
 * @returns {boolean} true, if the module should be selected
 */
const checkTest = (test, module, context) => {
	if (test === undefined) return true;
	switch (typeof test) {
		case "function":
			return test(module, context);
		case "boolean":
			return test;
		case "string": {
			const name = module.nameForCondition();
			return name && name.startsWith(test);
		}
		default:
			break;
	}
	if (test instanceof RegExp) {
		const name = module.nameForCondition();
		return name && test.test(name);
	}
	return false;
};
518
/**
 * Tests whether a module matches a cache group's `type` option.
 * @param {undefined|string|RegExp|Function} test type option
 * @param {Module} module the module
 * @returns {boolean} true, if the module should be selected
 */
const checkModuleType = (test, module) => {
	if (test === undefined) return true;
	if (typeof test === "function") return test(module.type);
	if (typeof test === "string") return module.type === test;
	if (test instanceof RegExp) return test.test(module.type);
	return false;
};
539
/**
 * Tests whether a module matches a cache group's `layer` option. An empty
 * string matches modules without a layer; a non-empty string matches as a
 * prefix of the layer.
 * @param {undefined|string|RegExp|Function} test layer option
 * @param {Module} module the module
 * @returns {boolean} true, if the module should be selected
 */
const checkModuleLayer = (test, module) => {
	if (test === undefined) return true;
	if (typeof test === "function") return test(module.layer);
	if (typeof test === "string") {
		const layer = module.layer;
		return test === "" ? !layer : layer && layer.startsWith(test);
	}
	if (test instanceof RegExp) {
		// NOTE(review): a nullish layer is coerced to the string "undefined"/
		// "null" by RegExp#test here — confirm this matches intended semantics.
		return test.test(module.layer);
	}
	return false;
};
560
/**
 * Creates a normalized CacheGroupSource from raw cache group options.
 * Sizes are normalized per size type; `maxSize` serves as the default for
 * both `maxAsyncSize` and `maxInitialSize`, and `minSize` as the default
 * for `minRemainingSize`.
 * @param {OptimizationSplitChunksCacheGroup} options the group options
 * @param {string} key key of cache group
 * @param {string[]} defaultSizeTypes the default size types
 * @returns {CacheGroupSource} the normalized cache group
 */
const createCacheGroupSource = (options, key, defaultSizeTypes) => {
	const normalize = value => normalizeSizes(value, defaultSizeTypes);
	const minSize = normalize(options.minSize);
	const maxSize = normalize(options.maxSize);
	return {
		key,
		priority: options.priority,
		getName: normalizeName(options.name),
		chunksFilter: normalizeChunksFilter(options.chunks),
		enforce: options.enforce,
		minSize,
		minSizeReduction: normalize(options.minSizeReduction),
		minRemainingSize: mergeSizes(normalize(options.minRemainingSize), minSize),
		enforceSizeThreshold: normalize(options.enforceSizeThreshold),
		maxAsyncSize: mergeSizes(normalize(options.maxAsyncSize), maxSize),
		maxInitialSize: mergeSizes(normalize(options.maxInitialSize), maxSize),
		minChunks: options.minChunks,
		maxAsyncRequests: options.maxAsyncRequests,
		maxInitialRequests: options.maxInitialRequests,
		filename: options.filename,
		idHint: options.idHint,
		automaticNameDelimiter: options.automaticNameDelimiter,
		reuseExistingChunk: options.reuseExistingChunk,
		usedExports: options.usedExports
	};
};
608
609module.exports = class SplitChunksPlugin {
610 /**
611 * @param {OptimizationSplitChunksOptions=} options plugin options
612 */
	constructor(options = {}) {
		// Size types assumed when a size option is given as a plain number
		const defaultSizeTypes = options.defaultSizeTypes || [
			"javascript",
			"unknown"
		];
		const fallbackCacheGroup = options.fallbackCacheGroup || {};
		const minSize = normalizeSizes(options.minSize, defaultSizeTypes);
		const minSizeReduction = normalizeSizes(
			options.minSizeReduction,
			defaultSizeTypes
		);
		const maxSize = normalizeSizes(options.maxSize, defaultSizeTypes);

		/** @type {SplitChunksOptions} */
		this.options = {
			chunksFilter: normalizeChunksFilter(options.chunks || "all"),
			defaultSizeTypes,
			minSize,
			minSizeReduction,
			// minSize acts as the fallback for minRemainingSize
			minRemainingSize: mergeSizes(
				normalizeSizes(options.minRemainingSize, defaultSizeTypes),
				minSize
			),
			enforceSizeThreshold: normalizeSizes(
				options.enforceSizeThreshold,
				defaultSizeTypes
			),
			// maxSize is the fallback for both async and initial max sizes
			maxAsyncSize: mergeSizes(
				normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
				maxSize
			),
			maxInitialSize: mergeSizes(
				normalizeSizes(options.maxInitialSize, defaultSizeTypes),
				maxSize
			),
			minChunks: options.minChunks || 1,
			maxAsyncRequests: options.maxAsyncRequests || 1,
			maxInitialRequests: options.maxInitialRequests || 1,
			hidePathInfo: options.hidePathInfo || false,
			filename: options.filename || undefined,
			getCacheGroups: normalizeCacheGroups(
				options.cacheGroups,
				defaultSizeTypes
			),
			getName: options.name ? normalizeName(options.name) : defaultGetName,
			automaticNameDelimiter: options.automaticNameDelimiter,
			usedExports: options.usedExports,
			// Fallback sizes cascade: group-specific -> group maxSize ->
			// global option -> global maxSize
			fallbackCacheGroup: {
				minSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.minSize, defaultSizeTypes),
					minSize
				),
				maxAsyncSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.maxAsyncSize, defaultSizeTypes),
					normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes),
					normalizeSizes(options.maxAsyncSize, defaultSizeTypes),
					normalizeSizes(options.maxSize, defaultSizeTypes)
				),
				maxInitialSize: mergeSizes(
					normalizeSizes(fallbackCacheGroup.maxInitialSize, defaultSizeTypes),
					normalizeSizes(fallbackCacheGroup.maxSize, defaultSizeTypes),
					normalizeSizes(options.maxInitialSize, defaultSizeTypes),
					normalizeSizes(options.maxSize, defaultSizeTypes)
				),
				automaticNameDelimiter:
					fallbackCacheGroup.automaticNameDelimiter ||
					options.automaticNameDelimiter ||
					"~"
			}
		};

		// Memoizes _getCacheGroup results per CacheGroupSource
		/** @type {WeakMap<CacheGroupSource, CacheGroup>} */
		this._cacheGroupCache = new WeakMap();
	}
687
688 /**
689 * @param {CacheGroupSource} cacheGroupSource source
690 * @returns {CacheGroup} the cache group (cached)
691 */
	_getCacheGroup(cacheGroupSource) {
		// Sources are stable objects, so the resolved group is memoized
		const cacheEntry = this._cacheGroupCache.get(cacheGroupSource);
		if (cacheEntry !== undefined) return cacheEntry;
		// With `enforce` set, the global defaults are excluded from the merge
		// (merging `undefined` is a no-op), so only the source's own sizes apply
		const minSize = mergeSizes(
			cacheGroupSource.minSize,
			cacheGroupSource.enforce ? undefined : this.options.minSize
		);
		const minSizeReduction = mergeSizes(
			cacheGroupSource.minSizeReduction,
			cacheGroupSource.enforce ? undefined : this.options.minSizeReduction
		);
		const minRemainingSize = mergeSizes(
			cacheGroupSource.minRemainingSize,
			cacheGroupSource.enforce ? undefined : this.options.minRemainingSize
		);
		const enforceSizeThreshold = mergeSizes(
			cacheGroupSource.enforceSizeThreshold,
			cacheGroupSource.enforce ? undefined : this.options.enforceSizeThreshold
		);
		const cacheGroup = {
			key: cacheGroupSource.key,
			priority: cacheGroupSource.priority || 0,
			chunksFilter: cacheGroupSource.chunksFilter || this.options.chunksFilter,
			minSize,
			minSizeReduction,
			minRemainingSize,
			enforceSizeThreshold,
			maxAsyncSize: mergeSizes(
				cacheGroupSource.maxAsyncSize,
				cacheGroupSource.enforce ? undefined : this.options.maxAsyncSize
			),
			maxInitialSize: mergeSizes(
				cacheGroupSource.maxInitialSize,
				cacheGroupSource.enforce ? undefined : this.options.maxInitialSize
			),
			// `enforce` relaxes count limits: minChunks 1, unlimited requests
			minChunks:
				cacheGroupSource.minChunks !== undefined
					? cacheGroupSource.minChunks
					: cacheGroupSource.enforce
					? 1
					: this.options.minChunks,
			maxAsyncRequests:
				cacheGroupSource.maxAsyncRequests !== undefined
					? cacheGroupSource.maxAsyncRequests
					: cacheGroupSource.enforce
					? Infinity
					: this.options.maxAsyncRequests,
			maxInitialRequests:
				cacheGroupSource.maxInitialRequests !== undefined
					? cacheGroupSource.maxInitialRequests
					: cacheGroupSource.enforce
					? Infinity
					: this.options.maxInitialRequests,
			getName:
				cacheGroupSource.getName !== undefined
					? cacheGroupSource.getName
					: this.options.getName,
			usedExports:
				cacheGroupSource.usedExports !== undefined
					? cacheGroupSource.usedExports
					: this.options.usedExports,
			filename:
				cacheGroupSource.filename !== undefined
					? cacheGroupSource.filename
					: this.options.filename,
			automaticNameDelimiter:
				cacheGroupSource.automaticNameDelimiter !== undefined
					? cacheGroupSource.automaticNameDelimiter
					: this.options.automaticNameDelimiter,
			idHint:
				cacheGroupSource.idHint !== undefined
					? cacheGroupSource.idHint
					: cacheGroupSource.key,
			reuseExistingChunk: cacheGroupSource.reuseExistingChunk || false,
			// Precomputed flags to skip size validation when all sizes are zero
			_validateSize: hasNonZeroSizes(minSize),
			_validateRemainingSize: hasNonZeroSizes(minRemainingSize),
			// Unlike the merges above, this one always includes the global
			// minSize regardless of `enforce`
			_minSizeForMaxSize: mergeSizes(
				cacheGroupSource.minSize,
				this.options.minSize
			),
			_conditionalEnforce: hasNonZeroSizes(enforceSizeThreshold)
		};
		this._cacheGroupCache.set(cacheGroupSource, cacheGroup);
		return cacheGroup;
	}
777
778 /**
779 * Apply the plugin
780 * @param {Compiler} compiler the compiler instance
781 * @returns {void}
782 */
783 apply(compiler) {
784 const cachedMakePathsRelative = makePathsRelative.bindContextCache(
785 compiler.context,
786 compiler.root
787 );
788 compiler.hooks.thisCompilation.tap("SplitChunksPlugin", compilation => {
789 const logger = compilation.getLogger("webpack.SplitChunksPlugin");
790 let alreadyOptimized = false;
791 compilation.hooks.unseal.tap("SplitChunksPlugin", () => {
792 alreadyOptimized = false;
793 });
794 compilation.hooks.optimizeChunks.tap(
795 {
796 name: "SplitChunksPlugin",
797 stage: STAGE_ADVANCED
798 },
799 chunks => {
800 if (alreadyOptimized) return;
801 alreadyOptimized = true;
802 logger.time("prepare");
803 const chunkGraph = compilation.chunkGraph;
804 const moduleGraph = compilation.moduleGraph;
805 // Give each selected chunk an index (to create strings from chunks)
806 /** @type {Map<Chunk, bigint>} */
807 const chunkIndexMap = new Map();
808 const ZERO = BigInt("0");
809 const ONE = BigInt("1");
810 const START = ONE << BigInt("31");
811 let index = START;
812 for (const chunk of chunks) {
813 chunkIndexMap.set(
814 chunk,
815 index | BigInt((Math.random() * 0x7fffffff) | 0)
816 );
817 index = index << ONE;
818 }
819 /**
820 * @param {Iterable<Chunk>} chunks list of chunks
821 * @returns {bigint | Chunk} key of the chunks
822 */
823 const getKey = chunks => {
824 const iterator = chunks[Symbol.iterator]();
825 let result = iterator.next();
826 if (result.done) return ZERO;
827 const first = result.value;
828 result = iterator.next();
829 if (result.done) return first;
830 let key =
831 chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
832 while (!(result = iterator.next()).done) {
833 const raw = chunkIndexMap.get(result.value);
834 key = key ^ raw;
835 }
836 return key;
837 };
838 const keyToString = key => {
839 if (typeof key === "bigint") return key.toString(16);
840 return chunkIndexMap.get(key).toString(16);
841 };
842
843 const getChunkSetsInGraph = memoize(() => {
844 /** @type {Map<bigint, Set<Chunk>>} */
845 const chunkSetsInGraph = new Map();
846 /** @type {Set<Chunk>} */
847 const singleChunkSets = new Set();
848 for (const module of compilation.modules) {
849 const chunks = chunkGraph.getModuleChunksIterable(module);
850 const chunksKey = getKey(chunks);
851 if (typeof chunksKey === "bigint") {
852 if (!chunkSetsInGraph.has(chunksKey)) {
853 chunkSetsInGraph.set(chunksKey, new Set(chunks));
854 }
855 } else {
856 singleChunkSets.add(chunksKey);
857 }
858 }
859 return { chunkSetsInGraph, singleChunkSets };
860 });
861
862 /**
863 * @param {Module} module the module
864 * @returns {Iterable<Chunk[]>} groups of chunks with equal exports
865 */
866 const groupChunksByExports = module => {
867 const exportsInfo = moduleGraph.getExportsInfo(module);
868 const groupedByUsedExports = new Map();
869 for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
870 const key = exportsInfo.getUsageKey(chunk.runtime);
871 const list = groupedByUsedExports.get(key);
872 if (list !== undefined) {
873 list.push(chunk);
874 } else {
875 groupedByUsedExports.set(key, [chunk]);
876 }
877 }
878 return groupedByUsedExports.values();
879 };
880
881 /** @type {Map<Module, Iterable<Chunk[]>>} */
882 const groupedByExportsMap = new Map();
883
884 const getExportsChunkSetsInGraph = memoize(() => {
885 /** @type {Map<bigint, Set<Chunk>>} */
886 const chunkSetsInGraph = new Map();
887 /** @type {Set<Chunk>} */
888 const singleChunkSets = new Set();
889 for (const module of compilation.modules) {
890 const groupedChunks = Array.from(groupChunksByExports(module));
891 groupedByExportsMap.set(module, groupedChunks);
892 for (const chunks of groupedChunks) {
893 if (chunks.length === 1) {
894 singleChunkSets.add(chunks[0]);
895 } else {
896 const chunksKey = /** @type {bigint} */ (getKey(chunks));
897 if (!chunkSetsInGraph.has(chunksKey)) {
898 chunkSetsInGraph.set(chunksKey, new Set(chunks));
899 }
900 }
901 }
902 }
903 return { chunkSetsInGraph, singleChunkSets };
904 });
905
906 // group these set of chunks by count
907 // to allow to check less sets via isSubset
908 // (only smaller sets can be subset)
909 const groupChunkSetsByCount = chunkSets => {
910 /** @type {Map<number, Array<Set<Chunk>>>} */
911 const chunkSetsByCount = new Map();
912 for (const chunksSet of chunkSets) {
913 const count = chunksSet.size;
914 let array = chunkSetsByCount.get(count);
915 if (array === undefined) {
916 array = [];
917 chunkSetsByCount.set(count, array);
918 }
919 array.push(chunksSet);
920 }
921 return chunkSetsByCount;
922 };
923 const getChunkSetsByCount = memoize(() =>
924 groupChunkSetsByCount(
925 getChunkSetsInGraph().chunkSetsInGraph.values()
926 )
927 );
928 const getExportsChunkSetsByCount = memoize(() =>
929 groupChunkSetsByCount(
930 getExportsChunkSetsInGraph().chunkSetsInGraph.values()
931 )
932 );
933
934 // Create a list of possible combinations
935 const createGetCombinations = (
936 chunkSets,
937 singleChunkSets,
938 chunkSetsByCount
939 ) => {
940 /** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
941 const combinationsCache = new Map();
942
943 return key => {
944 const cacheEntry = combinationsCache.get(key);
945 if (cacheEntry !== undefined) return cacheEntry;
946 if (key instanceof Chunk) {
947 const result = [key];
948 combinationsCache.set(key, result);
949 return result;
950 }
951 const chunksSet = chunkSets.get(key);
952 /** @type {(Set<Chunk> | Chunk)[]} */
953 const array = [chunksSet];
954 for (const [count, setArray] of chunkSetsByCount) {
955 // "equal" is not needed because they would have been merge in the first step
956 if (count < chunksSet.size) {
957 for (const set of setArray) {
958 if (isSubset(chunksSet, set)) {
959 array.push(set);
960 }
961 }
962 }
963 }
964 for (const chunk of singleChunkSets) {
965 if (chunksSet.has(chunk)) {
966 array.push(chunk);
967 }
968 }
969 combinationsCache.set(key, array);
970 return array;
971 };
972 };
973
974 const getCombinationsFactory = memoize(() => {
975 const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph();
976 return createGetCombinations(
977 chunkSetsInGraph,
978 singleChunkSets,
979 getChunkSetsByCount()
980 );
981 });
982 const getCombinations = key => getCombinationsFactory()(key);
983
984 const getExportsCombinationsFactory = memoize(() => {
985 const { chunkSetsInGraph, singleChunkSets } =
986 getExportsChunkSetsInGraph();
987 return createGetCombinations(
988 chunkSetsInGraph,
989 singleChunkSets,
990 getExportsChunkSetsByCount()
991 );
992 });
993 const getExportsCombinations = key =>
994 getExportsCombinationsFactory()(key);
995
996 /**
997 * @typedef {Object} SelectedChunksResult
998 * @property {Chunk[]} chunks the list of chunks
999 * @property {bigint | Chunk} key a key of the list
1000 */
1001
1002 /** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
1003 const selectedChunksCacheByChunksSet = new WeakMap();
1004
1005 /**
1006 * get list and key by applying the filter function to the list
1007 * It is cached for performance reasons
1008 * @param {Set<Chunk> | Chunk} chunks list of chunks
1009 * @param {ChunkFilterFunction} chunkFilter filter function for chunks
1010 * @returns {SelectedChunksResult} list and key
1011 */
1012 const getSelectedChunks = (chunks, chunkFilter) => {
1013 let entry = selectedChunksCacheByChunksSet.get(chunks);
1014 if (entry === undefined) {
1015 entry = new WeakMap();
1016 selectedChunksCacheByChunksSet.set(chunks, entry);
1017 }
1018 /** @type {SelectedChunksResult} */
1019 let entry2 = entry.get(chunkFilter);
1020 if (entry2 === undefined) {
1021 /** @type {Chunk[]} */
1022 const selectedChunks = [];
1023 if (chunks instanceof Chunk) {
1024 if (chunkFilter(chunks)) selectedChunks.push(chunks);
1025 } else {
1026 for (const chunk of chunks) {
1027 if (chunkFilter(chunk)) selectedChunks.push(chunk);
1028 }
1029 }
1030 entry2 = {
1031 chunks: selectedChunks,
1032 key: getKey(selectedChunks)
1033 };
1034 entry.set(chunkFilter, entry2);
1035 }
1036 return entry2;
1037 };
1038
					// Caches the "existing named chunk is a parent of all selected chunks"
					// check, keyed by `name|selectedChunksKey` (see addModuleToChunksInfoMap)
					/** @type {Map<string, boolean>} */
					const alreadyValidatedParents = new Map();
					// Chunk names for which a name-conflict error was already reported
					/** @type {Set<string>} */
					const alreadyReportedErrors = new Set();

					// Map a list of chunks to a list of modules
					// For the key the chunk "index" is used, the value is a SortableSet of modules
					/** @type {Map<string, ChunksInfoItem>} */
					const chunksInfoMap = new Map();
1048
1049 /**
1050 * @param {CacheGroup} cacheGroup the current cache group
1051 * @param {number} cacheGroupIndex the index of the cache group of ordering
1052 * @param {Chunk[]} selectedChunks chunks selected for this module
1053 * @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
1054 * @param {Module} module the current module
1055 * @returns {void}
1056 */
1057 const addModuleToChunksInfoMap = (
1058 cacheGroup,
1059 cacheGroupIndex,
1060 selectedChunks,
1061 selectedChunksKey,
1062 module
1063 ) => {
1064 // Break if minimum number of chunks is not reached
1065 if (selectedChunks.length < cacheGroup.minChunks) return;
1066 // Determine name for split chunk
1067 const name = cacheGroup.getName(
1068 module,
1069 selectedChunks,
1070 cacheGroup.key
1071 );
1072 // Check if the name is ok
1073 const existingChunk = compilation.namedChunks.get(name);
1074 if (existingChunk) {
1075 const parentValidationKey = `${name}|${
1076 typeof selectedChunksKey === "bigint"
1077 ? selectedChunksKey
1078 : selectedChunksKey.debugId
1079 }`;
1080 const valid = alreadyValidatedParents.get(parentValidationKey);
1081 if (valid === false) return;
1082 if (valid === undefined) {
1083 // Module can only be moved into the existing chunk if the existing chunk
1084 // is a parent of all selected chunks
1085 let isInAllParents = true;
1086 /** @type {Set<ChunkGroup>} */
1087 const queue = new Set();
1088 for (const chunk of selectedChunks) {
1089 for (const group of chunk.groupsIterable) {
1090 queue.add(group);
1091 }
1092 }
1093 for (const group of queue) {
1094 if (existingChunk.isInGroup(group)) continue;
1095 let hasParent = false;
1096 for (const parent of group.parentsIterable) {
1097 hasParent = true;
1098 queue.add(parent);
1099 }
1100 if (!hasParent) {
1101 isInAllParents = false;
1102 }
1103 }
1104 const valid = isInAllParents;
1105 alreadyValidatedParents.set(parentValidationKey, valid);
1106 if (!valid) {
1107 if (!alreadyReportedErrors.has(name)) {
1108 alreadyReportedErrors.add(name);
1109 compilation.errors.push(
1110 new WebpackError(
1111 "SplitChunksPlugin\n" +
1112 `Cache group "${cacheGroup.key}" conflicts with existing chunk.\n` +
1113 `Both have the same name "${name}" and existing chunk is not a parent of the selected modules.\n` +
1114 "Use a different name for the cache group or make sure that the existing chunk is a parent (e. g. via dependOn).\n" +
1115 'HINT: You can omit "name" to automatically create a name.\n' +
1116 "BREAKING CHANGE: webpack < 5 used to allow to use an entrypoint as splitChunk. " +
1117 "This is no longer allowed when the entrypoint is not a parent of the selected modules.\n" +
1118 "Remove this entrypoint and add modules to cache group's 'test' instead. " +
1119 "If you need modules to be evaluated on startup, add them to the existing entrypoints (make them arrays). " +
1120 "See migration guide of more info."
1121 )
1122 );
1123 }
1124 return;
1125 }
1126 }
1127 }
1128 // Create key for maps
1129 // When it has a name we use the name as key
1130 // Otherwise we create the key from chunks and cache group key
1131 // This automatically merges equal names
1132 const key =
1133 cacheGroup.key +
1134 (name
1135 ? ` name:${name}`
1136 : ` chunks:${keyToString(selectedChunksKey)}`);
1137 // Add module to maps
1138 let info = chunksInfoMap.get(key);
1139 if (info === undefined) {
1140 chunksInfoMap.set(
1141 key,
1142 (info = {
1143 modules: new SortableSet(
1144 undefined,
1145 compareModulesByIdentifier
1146 ),
1147 cacheGroup,
1148 cacheGroupIndex,
1149 name,
1150 sizes: {},
1151 chunks: new Set(),
1152 reuseableChunks: new Set(),
1153 chunksKeys: new Set()
1154 })
1155 );
1156 }
1157 const oldSize = info.modules.size;
1158 info.modules.add(module);
1159 if (info.modules.size !== oldSize) {
1160 for (const type of module.getSourceTypes()) {
1161 info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
1162 }
1163 }
1164 const oldChunksKeysSize = info.chunksKeys.size;
1165 info.chunksKeys.add(selectedChunksKey);
1166 if (oldChunksKeysSize !== info.chunksKeys.size) {
1167 for (const chunk of selectedChunks) {
1168 info.chunks.add(chunk);
1169 }
1170 }
1171 };
1172
					// Shared context handed to getCacheGroups / cache group test functions
					const context = {
						moduleGraph,
						chunkGraph
					};

					logger.timeEnd("prepare");

					logger.time("modules");

					// Walk through all modules
					for (const module of compilation.modules) {
						// Get cache group
						let cacheGroups = this.options.getCacheGroups(module, context);
						if (!Array.isArray(cacheGroups) || cacheGroups.length === 0) {
							continue;
						}

						// Prepare some values (usedExports = false)
						const getCombs = memoize(() => {
							const chunks = chunkGraph.getModuleChunksIterable(module);
							const chunksKey = getKey(chunks);
							return getCombinations(chunksKey);
						});

						// Prepare some values (usedExports = true)
						const getCombsByUsedExports = memoize(() => {
							// fill the groupedByExportsMap
							getExportsChunkSetsInGraph();
							/** @type {Set<Set<Chunk> | Chunk>} */
							const set = new Set();
							const groupedByUsedExports = groupedByExportsMap.get(module);
							for (const chunks of groupedByUsedExports) {
								const chunksKey = getKey(chunks);
								for (const comb of getExportsCombinations(chunksKey))
									set.add(comb);
							}
							return set;
						});

						let cacheGroupIndex = 0;
						for (const cacheGroupSource of cacheGroups) {
							const cacheGroup = this._getCacheGroup(cacheGroupSource);

							// Chunk combinations differ when usedExports-grouping applies
							const combs = cacheGroup.usedExports
								? getCombsByUsedExports()
								: getCombs();
							// For all combination of chunk selection
							for (const chunkCombination of combs) {
								// Break if minimum number of chunks is not reached
								const count =
									chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
								if (count < cacheGroup.minChunks) continue;
								// Select chunks by configuration
								const { chunks: selectedChunks, key: selectedChunksKey } =
									getSelectedChunks(chunkCombination, cacheGroup.chunksFilter);

								addModuleToChunksInfoMap(
									cacheGroup,
									cacheGroupIndex,
									selectedChunks,
									selectedChunksKey,
									module
								);
							}
							cacheGroupIndex++;
						}
					}

					logger.timeEnd("modules");

					logger.time("queue");
1244
1245 /**
1246 * @param {ChunksInfoItem} info entry
1247 * @param {string[]} sourceTypes source types to be removed
1248 */
1249 const removeModulesWithSourceType = (info, sourceTypes) => {
1250 for (const module of info.modules) {
1251 const types = module.getSourceTypes();
1252 if (sourceTypes.some(type => types.has(type))) {
1253 info.modules.delete(module);
1254 for (const type of types) {
1255 info.sizes[type] -= module.size(type);
1256 }
1257 }
1258 }
1259 };
1260
1261 /**
1262 * @param {ChunksInfoItem} info entry
1263 * @returns {boolean} true, if entry become empty
1264 */
1265 const removeMinSizeViolatingModules = info => {
1266 if (!info.cacheGroup._validateSize) return false;
1267 const violatingSizes = getViolatingMinSizes(
1268 info.sizes,
1269 info.cacheGroup.minSize
1270 );
1271 if (violatingSizes === undefined) return false;
1272 removeModulesWithSourceType(info, violatingSizes);
1273 return info.modules.size === 0;
1274 };
1275
1276 // Filter items were size < minSize
1277 for (const [key, info] of chunksInfoMap) {
1278 if (removeMinSizeViolatingModules(info)) {
1279 chunksInfoMap.delete(key);
1280 } else if (
1281 !checkMinSizeReduction(
1282 info.sizes,
1283 info.cacheGroup.minSizeReduction,
1284 info.chunks.size
1285 )
1286 ) {
1287 chunksInfoMap.delete(key);
1288 }
1289 }
1290
1291 /**
1292 * @typedef {Object} MaxSizeQueueItem
1293 * @property {SplitChunksSizes} minSize
1294 * @property {SplitChunksSizes} maxAsyncSize
1295 * @property {SplitChunksSizes} maxInitialSize
1296 * @property {string} automaticNameDelimiter
1297 * @property {string[]} keys
1298 */
1299
1300 /** @type {Map<Chunk, MaxSizeQueueItem>} */
1301 const maxSizeQueueMap = new Map();
1302
					// Process entries in priority order: repeatedly pick the best entry
					// (per compareEntries), create or reuse its chunk, then update all
					// remaining entries affected by the moved modules
					while (chunksInfoMap.size > 0) {
						// Find best matching entry
						let bestEntryKey;
						let bestEntry;
						for (const pair of chunksInfoMap) {
							const key = pair[0];
							const info = pair[1];
							if (
								bestEntry === undefined ||
								compareEntries(bestEntry, info) < 0
							) {
								bestEntry = info;
								bestEntryKey = key;
							}
						}

						const item = bestEntry;
						chunksInfoMap.delete(bestEntryKey);

						let chunkName = item.name;
						// Variable for the new chunk (lazy created)
						/** @type {Chunk} */
						let newChunk;
						// When no chunk name, check if we can reuse a chunk instead of creating a new one
						let isExistingChunk = false;
						let isReusedWithAllModules = false;
						if (chunkName) {
							const chunkByName = compilation.namedChunks.get(chunkName);
							if (chunkByName !== undefined) {
								newChunk = chunkByName;
								const oldSize = item.chunks.size;
								item.chunks.delete(newChunk);
								isExistingChunk = item.chunks.size !== oldSize;
							}
						} else if (item.cacheGroup.reuseExistingChunk) {
							// A chunk is reusable when it contains exactly the entry's
							// modules (and no entry modules when several chunks remain)
							outer: for (const chunk of item.chunks) {
								if (
									chunkGraph.getNumberOfChunkModules(chunk) !==
									item.modules.size
								) {
									continue;
								}
								if (
									item.chunks.size > 1 &&
									chunkGraph.getNumberOfEntryModules(chunk) > 0
								) {
									continue;
								}
								for (const module of item.modules) {
									if (!chunkGraph.isModuleInChunk(module, chunk)) {
										continue outer;
									}
								}
								// Prefer the chunk with the shortest, then alphabetically
								// smallest, name so the choice is deterministic
								if (!newChunk || !newChunk.name) {
									newChunk = chunk;
								} else if (
									chunk.name &&
									chunk.name.length < newChunk.name.length
								) {
									newChunk = chunk;
								} else if (
									chunk.name &&
									chunk.name.length === newChunk.name.length &&
									chunk.name < newChunk.name
								) {
									newChunk = chunk;
								}
							}
							if (newChunk) {
								item.chunks.delete(newChunk);
								chunkName = undefined;
								isExistingChunk = true;
								isReusedWithAllModules = true;
							}
						}

						// When the enforce threshold is met, several constraints below
						// (maxRequests, minRemainingSize) are skipped
						const enforced =
							item.cacheGroup._conditionalEnforce &&
							checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);

						const usedChunks = new Set(item.chunks);

						// Check if maxRequests condition can be fulfilled
						if (
							!enforced &&
							(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
								Number.isFinite(item.cacheGroup.maxAsyncRequests))
						) {
							for (const chunk of usedChunks) {
								// respect max requests
								const maxRequests = chunk.isOnlyInitial()
									? item.cacheGroup.maxInitialRequests
									: chunk.canBeInitial()
									? Math.min(
											item.cacheGroup.maxInitialRequests,
											item.cacheGroup.maxAsyncRequests
									  )
									: item.cacheGroup.maxAsyncRequests;
								if (
									isFinite(maxRequests) &&
									getRequests(chunk) >= maxRequests
								) {
									usedChunks.delete(chunk);
								}
							}
						}

						// Drop chunks that contain none of the entry's modules anymore
						outer: for (const chunk of usedChunks) {
							for (const module of item.modules) {
								if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
							}
							usedChunks.delete(chunk);
						}

						// Were some (invalid) chunks removed from usedChunks?
						// => readd all modules to the queue, as things could have been changed
						if (usedChunks.size < item.chunks.size) {
							if (isExistingChunk) usedChunks.add(newChunk);
							if (usedChunks.size >= item.cacheGroup.minChunks) {
								const chunksArr = Array.from(usedChunks);
								for (const module of item.modules) {
									addModuleToChunksInfoMap(
										item.cacheGroup,
										item.cacheGroupIndex,
										chunksArr,
										getKey(usedChunks),
										module
									);
								}
							}
							continue;
						}

						// Validate minRemainingSize constraint when a single chunk is left over
						if (
							!enforced &&
							item.cacheGroup._validateRemainingSize &&
							usedChunks.size === 1
						) {
							const [chunk] = usedChunks;
							// Sizes of the modules that would remain in the chunk
							let chunkSizes = Object.create(null);
							for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
								if (!item.modules.has(module)) {
									for (const type of module.getSourceTypes()) {
										chunkSizes[type] =
											(chunkSizes[type] || 0) + module.size(type);
									}
								}
							}
							const violatingSizes = getViolatingMinSizes(
								chunkSizes,
								item.cacheGroup.minRemainingSize
							);
							if (violatingSizes !== undefined) {
								const oldModulesSize = item.modules.size;
								removeModulesWithSourceType(item, violatingSizes);
								if (
									item.modules.size > 0 &&
									item.modules.size !== oldModulesSize
								) {
									// queue this item again to be processed again
									// without violating modules
									chunksInfoMap.set(bestEntryKey, item);
								}
								continue;
							}
						}

						// Create the new chunk if not reusing one
						if (newChunk === undefined) {
							newChunk = compilation.addChunk(chunkName);
						}
						// Walk through all chunks
						for (const chunk of usedChunks) {
							// Add graph connections for splitted chunk
							chunk.split(newChunk);
						}

						// Add a note to the chunk
						newChunk.chunkReason =
							(newChunk.chunkReason ? newChunk.chunkReason + ", " : "") +
							(isReusedWithAllModules
								? "reused as split chunk"
								: "split chunk");
						if (item.cacheGroup.key) {
							newChunk.chunkReason += ` (cache group: ${item.cacheGroup.key})`;
						}
						if (chunkName) {
							newChunk.chunkReason += ` (name: ${chunkName})`;
						}
						if (item.cacheGroup.filename) {
							newChunk.filenameTemplate = item.cacheGroup.filename;
						}
						if (item.cacheGroup.idHint) {
							newChunk.idNameHints.add(item.cacheGroup.idHint);
						}
						if (!isReusedWithAllModules) {
							// Add all modules to the new chunk
							for (const module of item.modules) {
								if (!module.chunkCondition(newChunk, compilation)) continue;
								// Add module to new chunk
								chunkGraph.connectChunkAndModule(newChunk, module);
								// Remove module from used chunks
								for (const chunk of usedChunks) {
									chunkGraph.disconnectChunkAndModule(chunk, module);
								}
							}
						} else {
							// Remove all modules from used chunks
							for (const module of item.modules) {
								for (const chunk of usedChunks) {
									chunkGraph.disconnectChunkAndModule(chunk, module);
								}
							}
						}

						if (
							Object.keys(item.cacheGroup.maxAsyncSize).length > 0 ||
							Object.keys(item.cacheGroup.maxInitialSize).length > 0
						) {
							// Remember (and merge) max-size settings for the maxSize pass:
							// minSize merges with Math.max, max sizes with Math.min
							const oldMaxSizeSettings = maxSizeQueueMap.get(newChunk);
							maxSizeQueueMap.set(newChunk, {
								minSize: oldMaxSizeSettings
									? combineSizes(
											oldMaxSizeSettings.minSize,
											item.cacheGroup._minSizeForMaxSize,
											Math.max
									  )
									: item.cacheGroup.minSize,
								maxAsyncSize: oldMaxSizeSettings
									? combineSizes(
											oldMaxSizeSettings.maxAsyncSize,
											item.cacheGroup.maxAsyncSize,
											Math.min
									  )
									: item.cacheGroup.maxAsyncSize,
								maxInitialSize: oldMaxSizeSettings
									? combineSizes(
											oldMaxSizeSettings.maxInitialSize,
											item.cacheGroup.maxInitialSize,
											Math.min
									  )
									: item.cacheGroup.maxInitialSize,
								automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
								keys: oldMaxSizeSettings
									? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
									: [item.cacheGroup.key]
							});
						}

						// remove all modules from other entries and update size
						for (const [key, info] of chunksInfoMap) {
							if (isOverlap(info.chunks, usedChunks)) {
								// update modules and total size
								// may remove it from the map when < minSize
								let updated = false;
								for (const module of item.modules) {
									if (info.modules.has(module)) {
										// remove module
										info.modules.delete(module);
										// update size
										for (const key of module.getSourceTypes()) {
											info.sizes[key] -= module.size(key);
										}
										updated = true;
									}
								}
								if (updated) {
									if (info.modules.size === 0) {
										chunksInfoMap.delete(key);
										continue;
									}
									if (
										removeMinSizeViolatingModules(info) ||
										!checkMinSizeReduction(
											info.sizes,
											info.cacheGroup.minSizeReduction,
											info.chunks.size
										)
									) {
										chunksInfoMap.delete(key);
										continue;
									}
								}
							}
						}
					}
1590
					logger.timeEnd("queue");

					logger.time("maxSize");

					// Deduplicates minSize > maxSize conflict warnings
					/** @type {Set<string>} */
					const incorrectMinMaxSizeSet = new Set();

					const { outputOptions } = compilation;

					// Make sure that maxSize is fulfilled
					// (snapshot with Array.from: splitting adds chunks while iterating)
					for (const chunk of Array.from(compilation.chunks)) {
						const chunkConfig = maxSizeQueueMap.get(chunk);
						const {
							minSize,
							maxAsyncSize,
							maxInitialSize,
							automaticNameDelimiter
						} = chunkConfig || this.options.fallbackCacheGroup;
						// Pick the applicable maxSize depending on how the chunk loads
						/** @type {SplitChunksSizes} */
						let maxSize;
						if (chunk.isOnlyInitial()) {
							maxSize = maxInitialSize;
						} else if (chunk.canBeInitial()) {
							maxSize = combineSizes(maxAsyncSize, maxInitialSize, Math.min);
						} else {
							maxSize = maxAsyncSize;
						}
						if (Object.keys(maxSize).length === 0) {
							continue;
						}
						// Warn once per configuration when minSize exceeds maxSize
						for (const key of Object.keys(maxSize)) {
							const maxSizeValue = maxSize[key];
							const minSizeValue = minSize[key];
							if (
								typeof minSizeValue === "number" &&
								minSizeValue > maxSizeValue
							) {
								const keys = chunkConfig && chunkConfig.keys;
								const warningKey = `${
									keys && keys.join()
								} ${minSizeValue} ${maxSizeValue}`;
								if (!incorrectMinMaxSizeSet.has(warningKey)) {
									incorrectMinMaxSizeSet.add(warningKey);
									compilation.warnings.push(
										new MinMaxSizeWarning(keys, minSizeValue, maxSizeValue)
									);
								}
							}
						}
						// Group the chunk's modules into parts respecting min/max size
						const results = deterministicGroupingForModules({
							minSize,
							maxSize: mapObject(maxSize, (value, key) => {
								const minSizeValue = minSize[key];
								return typeof minSizeValue === "number"
									? Math.max(value, minSizeValue)
									: value;
							}),
							items: chunkGraph.getChunkModulesIterable(chunk),
							getKey(module) {
								// Stable, path-relative key so grouping is deterministic
								const cache = getKeyCache.get(module);
								if (cache !== undefined) return cache;
								const ident = cachedMakePathsRelative(module.identifier());
								const nameForCondition =
									module.nameForCondition && module.nameForCondition();
								const name = nameForCondition
									? cachedMakePathsRelative(nameForCondition)
									: ident.replace(/^.*!|\?[^?!]*$/g, "");
								const fullKey =
									name +
									automaticNameDelimiter +
									hashFilename(ident, outputOptions);
								const key = requestToId(fullKey);
								getKeyCache.set(module, key);
								return key;
							},
							getSize(module) {
								const size = Object.create(null);
								for (const key of module.getSourceTypes()) {
									size[key] = module.size(key);
								}
								return size;
							}
						});
						// A single group means the chunk already fits: nothing to split
						if (results.length <= 1) {
							continue;
						}
						for (let i = 0; i < results.length; i++) {
							const group = results[i];
							const key = this.options.hidePathInfo
								? hashFilename(group.key, outputOptions)
								: group.key;
							let name = chunk.name
								? chunk.name + automaticNameDelimiter + key
								: null;
							// Limit overlong names by hashing the full name into a suffix
							if (name && name.length > 100) {
								name =
									name.slice(0, 100) +
									automaticNameDelimiter +
									hashFilename(name, outputOptions);
							}
							if (i !== results.length - 1) {
								const newPart = compilation.addChunk(name);
								chunk.split(newPart);
								newPart.chunkReason = chunk.chunkReason;
								// Add all modules to the new chunk
								for (const module of group.items) {
									if (!module.chunkCondition(newPart, compilation)) {
										continue;
									}
									// Add module to new chunk
									chunkGraph.connectChunkAndModule(newPart, module);
									// Remove module from used chunks
									chunkGraph.disconnectChunkAndModule(chunk, module);
								}
							} else {
								// change the chunk to be a part
								chunk.name = name;
							}
						}
					}
					logger.timeEnd("maxSize");
1712 }
1713 );
1714 });
1715 }
1716};