/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const { intersect } = require("../util/SetHelpers");
const {
	compareModulesByIdentifier,
	compareChunks
} = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");
const identifierUtils = require("../util/identifier");

/** @typedef {import("../../declarations/plugins/optimize/AggressiveSplittingPlugin").AggressiveSplittingPluginOptions} AggressiveSplittingPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../ChunkGraph")} ChunkGraph */
/** @typedef {import("../Compiler")} Compiler */
/** @typedef {import("../Module")} Module */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.check.js"),
	() =>
		require("../../schemas/plugins/optimize/AggressiveSplittingPlugin.json"),
	{
		name: "Aggressive Splitting Plugin",
		baseDataPath: "options"
	}
);

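/**
 * Returns a callback that moves a single module from oldChunk to newChunk
 * by disconnecting it from the old chunk and connecting it to the new one.
 * @param {ChunkGraph} chunkGraph the chunk graph
 * @param {Chunk} oldChunk the chunk the module is moved out of
 * @param {Chunk} newChunk the chunk the module is moved into
 * @returns {function(Module): void} callback to move one module
 */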
const moveModuleBetween = (chunkGraph, oldChunk, newChunk) => {
	return module => {
		chunkGraph.disconnectChunkAndModule(oldChunk, module);
		chunkGraph.connectChunkAndModule(newChunk, module);
	};
};

40/**
41 * @param {ChunkGraph} chunkGraph the chunk graph
42 * @param {Chunk} chunk the chunk
43 * @returns {function(Module): boolean} filter for entry module
44 */
const isNotAEntryModule = (chunkGraph, chunk) => {
	return module => {
		return !chunkGraph.isEntryModuleInChunk(module, chunk);
	};
};

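// Chunks whose split data has been saved to the records; exposed through the
// static wasChunkRecorded helper so stats can flag them (see recordHash below)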
/** @type {WeakSet<Chunk>} */
const recordedChunks = new WeakSet();

class AggressiveSplittingPlugin {
	/**
	 * @param {AggressiveSplittingPluginOptions=} options options object
	 */
	constructor(options = {}) {
		validate(options);

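		// Fill in default values (sizes are in bytes): minSize 30 KiB,
		// maxSize 50 KiB. chunkOverhead and entryChunkMultiplicator are
		// defaulted here but not referenced by the splitting logic below.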
		this.options = options;
		if (typeof this.options.minSize !== "number") {
			this.options.minSize = 30 * 1024;
		}
		if (typeof this.options.maxSize !== "number") {
			this.options.maxSize = 50 * 1024;
		}
		if (typeof this.options.chunkOverhead !== "number") {
			this.options.chunkOverhead = 0;
		}
		if (typeof this.options.entryChunkMultiplicator !== "number") {
			this.options.entryChunkMultiplicator = 1;
		}
	}

	/**
	 * @param {Chunk} chunk the chunk to test
	 * @returns {boolean} true if the chunk was recorded
	 */
	static wasChunkRecorded(chunk) {
		return recordedChunks.has(chunk);
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.thisCompilation.tap(
			"AggressiveSplittingPlugin",
			compilation => {
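				// Per-compilation state shared by the hooks below: splits made
				// during this compilation, chunks produced by aggressive
				// splitting, and the split data attached to each chunk.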
				let needAdditionalSeal = false;
				let newSplits;
				let fromAggressiveSplittingSet;
				let chunkSplitDataMap;
				compilation.hooks.optimize.tap("AggressiveSplittingPlugin", () => {
					newSplits = [];
					fromAggressiveSplittingSet = new Set();
					chunkSplitDataMap = new Map();
				});
				compilation.hooks.optimizeChunks.tap(
					{
						name: "AggressiveSplittingPlugin",
						stage: STAGE_ADVANCED
					},
					chunks => {
						const chunkGraph = compilation.chunkGraph;
						// Precompute lookup maps between modules and their
						// context-relative identifiers
						const nameToModuleMap = new Map();
						const moduleToNameMap = new Map();
						const makePathsRelative =
							identifierUtils.makePathsRelative.bindContextCache(
								compiler.context,
								compiler.root
							);
						for (const m of compilation.modules) {
							const name = makePathsRelative(m.identifier());
							nameToModuleMap.set(name, m);
							moduleToNameMap.set(m, name);
						}

						// Check used chunk ids
						const usedIds = new Set();
						for (const chunk of chunks) {
							usedIds.add(chunk.id);
						}

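						// Splittings to try to re-apply: those stored in the
						// persistent records plus any created by a previous
						// optimizeChunks iteration of this compilation
						// (newSplits is reset in the optimize hook).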
						const recordedSplits =
							(compilation.records && compilation.records.aggressiveSplits) ||
							[];
						const usedSplits = newSplits
							? recordedSplits.concat(newSplits)
							: recordedSplits;

						const minSize = this.options.minSize;
						const maxSize = this.options.maxSize;

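						// Tries to apply a single split to the current chunk graph.
						// Returns false when the split no longer fits: its id is
						// already taken, a module is missing, the recorded size
						// changed, or no chunk contains all of its modules.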
						const applySplit = splitData => {
							// Cannot split if id is already taken
							if (splitData.id !== undefined && usedIds.has(splitData.id)) {
								return false;
							}

							// Get module objects from names
							const selectedModules = splitData.modules.map(name =>
								nameToModuleMap.get(name)
							);

							// Do all of the modules still exist?
							if (!selectedModules.every(Boolean)) return false;

							// Check if size matches (faster than waiting for hash)
							let size = 0;
							for (const m of selectedModules) size += m.size();
							if (size !== splitData.size) return false;

							// get chunks with all modules
							const selectedChunks = intersect(
								selectedModules.map(
									m => new Set(chunkGraph.getModuleChunksIterable(m))
								)
							);

							// No relevant chunks found
							if (selectedChunks.size === 0) return false;

							// The single chunk found already contains exactly these
							// modules, so it is the split itself (or equivalent to it)
							if (
								selectedChunks.size === 1 &&
								chunkGraph.getNumberOfChunkModules(
									Array.from(selectedChunks)[0]
								) === selectedModules.length
							) {
								const chunk = Array.from(selectedChunks)[0];
								if (fromAggressiveSplittingSet.has(chunk)) return false;
								fromAggressiveSplittingSet.add(chunk);
								chunkSplitDataMap.set(chunk, splitData);
								return true;
							}

							// split the chunk into two parts
							const newChunk = compilation.addChunk();
							newChunk.chunkReason = "aggressive splitted";
							for (const chunk of selectedChunks) {
								selectedModules.forEach(
									moveModuleBetween(chunkGraph, chunk, newChunk)
								);
								chunk.split(newChunk);
								chunk.name = null;
							}
							fromAggressiveSplittingSet.add(newChunk);
							chunkSplitDataMap.set(newChunk, splitData);

							if (splitData.id !== null && splitData.id !== undefined) {
								newChunk.id = splitData.id;
								newChunk.ids = [splitData.id];
							}
							return true;
						};

						// Try to restore the recorded splittings
						let changed = false;
						for (let j = 0; j < usedSplits.length; j++) {
							const splitData = usedSplits[j];
							if (applySplit(splitData)) changed = true;
						}

						// For any chunk which isn't split yet, split it and create a new entry,
						// starting with the biggest chunk
						const cmpFn = compareChunks(chunkGraph);
						const sortedChunks = Array.from(chunks).sort((a, b) => {
							const diff1 =
								chunkGraph.getChunkModulesSize(b) -
								chunkGraph.getChunkModulesSize(a);
							if (diff1) return diff1;
							const diff2 =
								chunkGraph.getNumberOfChunkModules(a) -
								chunkGraph.getNumberOfChunkModules(b);
							if (diff2) return diff2;
							return cmpFn(a, b);
						});
						for (const chunk of sortedChunks) {
							if (fromAggressiveSplittingSet.has(chunk)) continue;
							const size = chunkGraph.getChunkModulesSize(chunk);
							if (
								size > maxSize &&
								chunkGraph.getNumberOfChunkModules(chunk) > 1
							) {
								const modules = chunkGraph
									.getOrderedChunkModules(chunk, compareModulesByIdentifier)
									.filter(isNotAEntryModule(chunkGraph, chunk));
								const selectedModules = [];
								let selectedModulesSize = 0;
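								// Greedily take modules (in identifier order) until
								// adding the next one would push the selection past
								// maxSize while it already satisfies minSize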
								for (let k = 0; k < modules.length; k++) {
									const module = modules[k];
									const newSize = selectedModulesSize + module.size();
									if (newSize > maxSize && selectedModulesSize >= minSize) {
										break;
									}
									selectedModulesSize = newSize;
									selectedModules.push(module);
								}
								if (selectedModules.length === 0) continue;
								const splitData = {
									modules: selectedModules
										.map(m => moduleToNameMap.get(m))
										.sort(),
									size: selectedModulesSize
								};

								if (applySplit(splitData)) {
									newSplits = (newSplits || []).concat(splitData);
									changed = true;
								}
							}
						}
						if (changed) return true;
					}
				);
				compilation.hooks.recordHash.tap(
					"AggressiveSplittingPlugin",
					records => {
						// Save the applied splittings to the records
						const allSplits = new Set();
						const invalidSplits = new Set();

						// Check if some splittings are invalid
						// We remove invalid splittings and try again
						for (const chunk of compilation.chunks) {
							const splitData = chunkSplitDataMap.get(chunk);
							if (splitData !== undefined) {
								if (splitData.hash && chunk.hash !== splitData.hash) {
									// Split was successful, but the chunk hash no longer
									// matches the recorded one, so the split is useless now
									invalidSplits.add(splitData);
								}
							}
						}

						if (invalidSplits.size > 0) {
							records.aggressiveSplits = records.aggressiveSplits.filter(
								splitData => !invalidSplits.has(splitData)
							);
							needAdditionalSeal = true;
						} else {
							// set hash and id values on all (new) splittings
							for (const chunk of compilation.chunks) {
								const splitData = chunkSplitDataMap.get(chunk);
								if (splitData !== undefined) {
									splitData.hash = chunk.hash;
									splitData.id = chunk.id;
									allSplits.add(splitData);
									// set flag for stats
									recordedChunks.add(chunk);
								}
							}

							// Also add all unused historical splits (after the used ones)
							// They can still be used in some future compilation
							const recordedSplits =
								compilation.records && compilation.records.aggressiveSplits;
							if (recordedSplits) {
								for (const splitData of recordedSplits) {
									if (!invalidSplits.has(splitData)) allSplits.add(splitData);
								}
							}

							// record all splits
							records.aggressiveSplits = Array.from(allSplits);

							needAdditionalSeal = false;
						}
					}
				);
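				// When invalid splits were removed from the records, request an
				// additional seal so the compilation is re-optimized without them.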
				compilation.hooks.needAdditionalSeal.tap(
					"AggressiveSplittingPlugin",
					() => {
						if (needAdditionalSeal) {
							needAdditionalSeal = false;
							return true;
						}
					}
				);
			}
		);
	}
}
module.exports = AggressiveSplittingPlugin;