const crypto = require('crypto');
const fs = require('fs');

function OutputHash({
    manifestFiles = [],
    validateOutput = false,
    validateOutputRegex = /^.*$/,
} = {}) {
    this.manifestFiles = manifestFiles;
    this.validateOutput = validateOutput;
    this.validateOutputRegex = validateOutputRegex;
}
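
// A minimal sketch of how this plugin is typically wired into a webpack 4
// config (the package and entry names here are assumptions, not taken from
// this file). `output.filename` must include a hash placeholder such as
// `[chunkhash]` for the renaming below to have anything to rewrite:
//
//   // webpack.config.js (illustrative)
//   const OutputHash = require('webpack-plugin-hash-output'); // assumed package name
//
//   module.exports = {
//       output: { filename: '[name].[chunkhash].js' },
//       plugins: [
//           new OutputHash({
//               manifestFiles: ['manifest'], // chunks that reference all the others
//               validateOutput: true, // re-hash emitted files after the build
//           }),
//       ],
//   };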

/**
 * Replaces a string in an asset, dispatching on the webpack Source subclass.
 */
function replaceStringInAsset(asset, source, target) {
    const sourceRE = new RegExp(source, 'g');

    if (typeof asset === 'string') {
        return asset.replace(sourceRE, target);
    }

    // ReplaceSource
    if ('_source' in asset) {
        asset._source = replaceStringInAsset(asset._source, source, target);
        return asset;
    }

    // CachedSource
    if ('_cachedSource' in asset) {
        asset._cachedSource = asset.source().replace(sourceRE, target);
        return asset;
    }

    // RawSource / SourceMapSource
    if ('_value' in asset) {
        asset._value = asset.source().replace(sourceRE, target);
        return asset;
    }

    // ConcatSource
    if ('children' in asset) {
        asset.children = asset.children.map(child => replaceStringInAsset(child, source, target));
        return asset;
    }

    throw new Error(`Unknown asset type (${asset.constructor.name}). ` +
        'Unfortunately this type of asset is not supported yet. ' +
        'Please raise an issue and we will look into it ASAP.');
}
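
// For example (a sketch, assuming the `webpack-sources` package that webpack
// uses internally; the hashes are hypothetical):
//
//   const { RawSource } = require('webpack-sources');
//   const asset = new RawSource('loadScript("chunk.abc123.js");');
//   replaceStringInAsset(asset, 'abc123', 'def456');
//   asset.source(); // -> 'loadScript("chunk.def456.js");'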

/**
 * Computes the new hash of a chunk.
 *
 * This function updates the *name* of the main file (i.e. the source code), and the *content*
 * of the secondary files (i.e. the source maps).
 */
function reHashChunk(chunk, assets, hashFn) {
    const oldHash = chunk.renderedHash;
    const oldChunkName = chunk.files[0];
    const asset = assets[oldChunkName];
    const { fullHash, shortHash } = hashFn(asset.source());
    const newChunkName = oldChunkName.replace(oldHash, shortHash);

    // Update the main file of the chunk with the new name
    chunk.hash = fullHash;
    chunk.renderedHash = shortHash;
    chunk.files[0] = newChunkName;

    // Update the asset associated with that file
    asset._name = newChunkName;
    delete assets[oldChunkName];
    assets[newChunkName] = asset;

    // Update the content of the rest of the files in the chunk
    chunk.files.slice(1).forEach((file) => {
        const secondaryAsset = assets[file];
        replaceStringInAsset(secondaryAsset, oldHash, shortHash);
    });

    return {
        oldHash,
        newHash: shortHash,
    };
}
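
// A sketch of the effect, with hypothetical names: for a chunk whose files are
// ['app.abc123.js', 'app.abc123.js.map'] and whose renderedHash is 'abc123',
// reHashChunk re-hashes the JS content, renames the asset to (say)
// 'app.def456.js', rewrites 'abc123' -> 'def456' inside the source map, and
// returns { oldHash: 'abc123', newHash: 'def456' }.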

/**
 * Replaces old hashes with new hashes in chunk files.
 *
 * This function iterates through the file contents and replaces all occurrences of old hashes
 * with new ones. We assume hashes are unique enough that we don't accidentally hit a collision
 * and replace existing data.
 */
function replaceOldHashForNewInChunkFiles(chunk, assets, oldHashToNewHashMap) {
    chunk.files.forEach((file) => {
        Object.keys(oldHashToNewHashMap).forEach((oldHash) => {
            const newHash = oldHashToNewHashMap[oldHash];
            replaceStringInAsset(assets[file], oldHash, newHash);
        });
    });
}
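
// For example, with a (hypothetical) map { abc123: 'def456' }, every file in
// the chunk gets the substring 'abc123' replaced by 'def456', so references to
// already re-hashed child chunks stay up to date.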

function flatten(arr) {
    if (!Array.isArray(arr)) return arr;
    return arr.reduce((acc, i) => acc.concat(flatten(i)), []);
}
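
// e.g. flatten([1, [2, [3, 4]], 5]) -> [1, 2, 3, 4, 5]; a non-array argument
// is returned as-is.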

/**
 * Collects, into `parents`, the chunks of every chunk group that has to be loaded before
 * `chunkGroup` (its transitive parents), skipping groups already visited.
 */
function getAllParents(chunkGroup, parents, visitedGroups) {
    if (visitedGroups.includes(chunkGroup)) return;
    visitedGroups.push(chunkGroup);

    chunkGroup.getParents().forEach((parentGroup) => {
        parents.push(parentGroup.chunks.filter(chunk => !parents.includes(chunk)));
        getAllParents(parentGroup, parents, visitedGroups);
    });
}

OutputHash.prototype.apply = function apply(compiler) {
    let hashFn;

    compiler.hooks.compilation.tap('OutputHash', (compilation) => {
        const { outputOptions } = compilation;
        const {
            hashFunction, hashDigest, hashDigestLength, hashSalt,
        } = outputOptions;

        // Reuses webpack's output hashing options
        hashFn = (input) => {
            const hashObj = crypto.createHash(hashFunction).update(input);
            if (hashSalt) hashObj.update(hashSalt);
            const fullHash = hashObj.digest(hashDigest);
            return { fullHash, shortHash: fullHash.substr(0, hashDigestLength) };
        };
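
        // For example, with webpack 4's defaults ({ hashFunction: 'md4',
        // hashDigest: 'hex', hashDigestLength: 20 }), hashFn(content) returns
        // the full hex digest of `content` plus its first 20 characters, which
        // is the part that appears in file names. (On newer Node/OpenSSL
        // builds 'md4' may be unavailable; a config using 'sha256' behaves the
        // same way here.)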

        // Webpack does not pass both chunks and assets to any single compilation step, but we
        // need both. To get them, we hook into 'afterOptimizeChunks' and save the chunks for
        // processing later.
        compilation.hooks.afterOptimizeChunks.tap('Capture chunks', (chunks, chunkGroups) => {
            this.chunks = chunks;
            this.chunkGroups = chunkGroups;
        });

        compilation.hooks.afterOptimizeAssets.tap('Update chunks', (assets) => {
            // Sort non-manifest chunks according to their parent dependencies.
            const nonManifestChunks = this.chunks.filter(chunk =>
                !this.manifestFiles.includes(chunk.name));

            const chunksByDependency = [];

            // Sort the chunks by graph depth: a chunk is added only once all of its parents
            // (chunks that load before it) are accounted for, so roots end up first and leaves
            // last. The list is reversed below before processing.
            while (nonManifestChunks.length) {
                let i = 0;

                while (i < nonManifestChunks.length) {
                    const currentChunk = nonManifestChunks[i];

                    // Get a list of all chunks that are parents of currentChunk. A parent is
                    // a chunk that has to be loaded before currentChunk can be loaded.
                    let parents = [];
                    Array.from(currentChunk.groupsIterable)
                        .forEach(group => getAllParents(group, parents, []));
                    parents = flatten(parents).filter(parent => parent !== currentChunk);

                    const hasNoParent = !parents || parents.length === 0;
                    const containsChunk = (chunkList, chunk) =>
                        chunkList.map(c => String(c.id)).indexOf(String(chunk.id)) !== -1;

                    const isParentAccountedFor = p =>
                        containsChunk(chunksByDependency, p)
                        || !containsChunk(nonManifestChunks, p);

                    if (hasNoParent || parents.every(isParentAccountedFor)) {
                        chunksByDependency.push(currentChunk);
                        nonManifestChunks.splice(i, 1);
                    } else {
                        i += 1;
                    }
                }
            }

            const chunksByDependencyDesc = chunksByDependency.reverse();
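
            // A sketch of the resulting order, with hypothetical chunks: if `main`
            // dynamically imports `about`, which dynamically imports `widget`, then
            // chunksByDependency is [main, about, widget] and chunksByDependencyDesc is
            // [widget, about, main], so each chunk is re-hashed before any chunk that
            // embeds its file name.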

            const nameMap = {};

            // We assume that only the manifest chunks reference all the other chunks. They
            // need to be processed at the end, once we know the new names of all the other
            // chunks. Non-manifest chunks are processed in dependency order: each chunk is
            // re-hashed before any chunk that references it.
            chunksByDependencyDesc.forEach((chunk) => {
                replaceOldHashForNewInChunkFiles(chunk, assets, nameMap);
                const { newHash, oldHash } = reHashChunk(chunk, assets, hashFn);
                nameMap[oldHash] = newHash;
            });

            // After the main files have been re-hashed, we need to update the content of the
            // manifest files to point to the new re-hashed names, and then re-hash the
            // manifests themselves.
            this.chunks
                .filter(chunk => this.manifestFiles.includes(chunk.name))
                .forEach((chunk) => {
                    replaceOldHashForNewInChunkFiles(chunk, assets, nameMap);
                    reHashChunk(chunk, assets, hashFn);
                });
        });
    });

    if (this.validateOutput) {
        compiler.hooks.afterEmit.tapAsync('Validate output', (compilation, callback) => {
            let err;
            Object.keys(compilation.assets)
                .filter(assetName => assetName.match(this.validateOutputRegex))
                .forEach((assetName) => {
                    const asset = compilation.assets[assetName];
                    const path = asset.existsAt;
                    const assetContent = fs.readFileSync(path, 'utf8');
                    const { shortHash } = hashFn(assetContent);
                    if (!assetName.includes(shortHash)) {
                        err = new Error(`The hash in ${assetName} does not match the hash of the content (${shortHash})`);
                    }
                });
            return callback(err);
        });
    }
};

module.exports = OutputHash;