// output-hash webpack plugin — source retrieved via UNPKG (viewer header removed).
1const crypto = require('crypto');
2const fs = require('fs');
3
/**
 * Webpack plugin that re-computes content hashes after all asset
 * transformations have run, so emitted file names match final content.
 *
 * @param {Object} [options]
 * @param {boolean} [options.validateOutput=false] - after emit, re-hash files on
 *     disk and fail the build if a file name does not contain its content hash.
 * @param {RegExp} [options.validateOutputRegex=/^.*$/] - restrict validation to
 *     asset names matching this pattern.
 */
function OutputHash(options = {}) {
    const { validateOutput = false, validateOutputRegex = /^.*$/ } = options;

    this.validateOutput = validateOutput;
    this.validateOutputRegex = validateOutputRegex;
}
11
/**
 * Replaces every occurrence of `source` with `target` inside an asset,
 * dispatching on the (duck-typed) webpack source type.
 *
 * @param {string|Object} asset - a plain string, or a webpack source object
 *     (ReplaceSource, CachedSource, RawSource/SourceMapSource, ConcatSource).
 * @param {string} source - literal string to search for (typically an old hash).
 * @param {string} target - replacement string (typically the new hash).
 * @returns {string|Object} the replaced string, or the mutated asset.
 * @throws {Error} when the asset shape is not one of the supported types.
 */
function replaceStringInAsset(asset, source, target) {
    // Escape regex metacharacters so `source` is always matched literally.
    // Hashes are normally hex, but digests such as base64 can contain `+`/`/`,
    // and an unescaped pattern would match (or fail on) unintended text.
    const sourceRE = new RegExp(source.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'), 'g');

    if (typeof asset === 'string') {
        return asset.replace(sourceRE, target);
    }

    // ReplaceSource
    if ('_source' in asset) {
        asset._source = replaceStringInAsset(asset._source, source, target);
        return asset;
    }

    // CachedSource
    if ('_cachedSource' in asset) {
        asset._cachedSource = asset.source().replace(sourceRE, target);
        return asset;
    }

    // RawSource / SourceMapSource
    if ('_value' in asset) {
        asset._value = asset.source().replace(sourceRE, target);
        return asset;
    }

    // ConcatSource
    if ('children' in asset) {
        asset.children = asset.children.map(child => replaceStringInAsset(child, source, target));
        return asset;
    }

    throw new Error(`Unknown asset type (${asset.constructor.name})! ` +
        'Unfortunately this type of asset is not supported yet. ' +
        'Please raise an issue and we will look into it asap');
}
50
/**
 * Re-computes the hash of a chunk from its final content.
 *
 * The chunk's primary file (index 0 of chunk.files) is *renamed* to carry the
 * new hash; every secondary file (e.g. source maps) keeps its name but has its
 * *content* updated to reference the new hash.
 *
 * @param {Object} chunk - webpack chunk (mutated: hash, renderedHash, files).
 * @param {Object} assets - compilation.assets map (mutated: asset re-keyed).
 * @param {Function} hashFn - (content) => { fullHash, shortHash }.
 * @returns {{oldHash: string, newHash: string}} mapping for later replacements.
 */
function reHashChunk(chunk, assets, hashFn) {
    const previousHash = chunk.renderedHash;
    const [mainFile, ...secondaryFiles] = chunk.files;
    const mainAsset = assets[mainFile];

    // Hash the main file's final content and derive the renamed file.
    const { fullHash, shortHash } = hashFn(mainAsset.source());
    const renamedFile = mainFile.replace(previousHash, shortHash);

    // Point the chunk at its new hash and file name.
    chunk.hash = fullHash;
    chunk.renderedHash = shortHash;
    chunk.files[0] = renamedFile;

    // Re-key the main asset under its new name.
    mainAsset._name = renamedFile;
    delete assets[mainFile];
    assets[renamedFile] = mainAsset;

    // Secondary files: same name, content rewritten to the new hash.
    secondaryFiles.forEach((file) => {
        replaceStringInAsset(assets[file], previousHash, shortHash);
    });

    return { oldHash: previousHash, newHash: shortHash };
}
85
/**
 * Rewrites stale hash references inside every file of a chunk.
 *
 * Each known old-hash -> new-hash pair is applied to each of the chunk's
 * assets. We assume hashes are unique enough that a replacement never
 * collides with unrelated data.
 *
 * @param {Object} chunk - webpack chunk whose files should be rewritten.
 * @param {Object} assets - compilation.assets map (assets mutated in place).
 * @param {Object} oldHashToNewHashMap - { oldHash: newHash } pairs.
 */
function replaceOldHashForNewInChunkFiles(chunk, assets, oldHashToNewHashMap) {
    for (const file of chunk.files) {
        for (const [oldHash, newHash] of Object.entries(oldHashToNewHashMap)) {
            replaceStringInAsset(assets[file], oldHash, newHash);
        }
    }
}
101
/**
 * Webpack plugin entry point: taps into the compilation to re-hash chunk
 * files after all asset optimizations have run, and (optionally) validates
 * the emitted files on disk after emit.
 *
 * @param {Object} compiler - webpack compiler instance.
 */
OutputHash.prototype.apply = function apply(compiler) {
    // Declared here so the afterEmit validation hook below can reuse the
    // same hash function built from the compilation's output options.
    let hashFn;

    compiler.hooks.compilation.tap('OutputHash', (compilation) => {
        const { outputOptions } = compilation;
        const {
            hashFunction, hashDigest, hashDigestLength, hashSalt,
        } = outputOptions;

        // Reuses webpack's own hashing options (algorithm, digest encoding,
        // truncation length, salt) so our short hashes have the same format
        // as the ones webpack put in the file names.
        // NOTE(review): the salt is mixed in *after* the content here —
        // confirm this matches webpack's own salting order if exact hash
        // parity with webpack is ever required.
        hashFn = (input) => {
            const hashObj = crypto.createHash(hashFunction).update(input);
            if (hashSalt) hashObj.update(hashSalt);
            const fullHash = hashObj.digest(hashDigest);
            return { fullHash, shortHash: fullHash.substr(0, hashDigestLength) };
        };

        // Webpack does not pass chunks and assets to any compilation step, but we need both.
        // To get them, we hook into 'afterOptimizeChunks' and save the chunks (and their
        // groups) on the plugin instance for processing later in 'afterOptimizeAssets'.
        compilation.hooks.afterOptimizeChunks.tap('Capture chunks', (chunks, chunkGroups) => {
            this.chunks = chunks;
            this.chunkGroups = chunkGroups;
        });

        compilation.hooks.afterOptimizeAssets.tap('Update chunks', (assets) => {
            // Chunks must be re-hashed children-first: a parent chunk embeds its
            // children's hashed names, so those names must be final before the
            // parent's content is hashed. `extractInOrder` produces that order.
            const sortedChunks = [];
            const visitedGroups = [];
            // Accumulates oldHash -> newHash from already-processed chunks so
            // later chunks can have stale references rewritten first.
            const nameMap = {};

            const extractInOrder = (group) => {
                // Mark the group as processed
                visitedGroups.push(group);

                // For each child group, process it if it hasn't been processed before
                group.getChildren().forEach((child) => {
                    if (!visitedGroups.includes(child)) extractInOrder(child);
                });

                // For each chunk in this group
                // - Get all groups containing that chunk (that includes this group)
                // - If the group hasn't been processed yet, process it (this will skip current
                //   group)
                // - After all groups containing the chunk have been processed, add the chunk to
                //   the list of sortedChunks
                group.chunks.forEach((chunk) => {
                    Array.from(chunk.groupsIterable).forEach((parentGroup) => {
                        if (!visitedGroups.includes(parentGroup)) extractInOrder(parentGroup);
                    });
                    if (!sortedChunks.includes(chunk)) sortedChunks.push(chunk);
                });
            };

            this.chunkGroups.forEach(extractInOrder);

            // In dependency order: first rewrite any stale hashes this chunk
            // references, then re-hash it and remember its own rename.
            sortedChunks.forEach((chunk) => {
                replaceOldHashForNewInChunkFiles(chunk, assets, nameMap);
                const { newHash, oldHash } = reHashChunk(chunk, assets, hashFn);
                nameMap[oldHash] = newHash;
            });
        });
    });

    if (this.validateOutput) {
        // After files hit the disk, re-read each matching asset and verify its
        // file name contains the hash of its actual content.
        compiler.hooks.afterEmit.tapAsync('Validate output', (compilation, callback) => {
            // NOTE(review): `err` is overwritten on each mismatch, so only the
            // *last* failing asset is reported — consider collecting all.
            let err;
            Object.keys(compilation.assets)
                .filter(assetName => assetName.match(this.validateOutputRegex))
                .forEach((assetName) => {
                    const asset = compilation.assets[assetName];
                    // `existsAt` is a webpack-internal field holding the emitted
                    // path — presumably webpack 4; verify before upgrading.
                    const path = asset.existsAt;
                    const assetContent = fs.readFileSync(path, 'utf8');
                    const { shortHash } = hashFn(assetContent);
                    if (!assetName.includes(shortHash)) {
                        err = new Error(`The hash in ${assetName} does not match the hash of the content (${shortHash})`);
                    }
                });
            return callback(err);
        });
    }
};
183
// Expose the plugin constructor (CommonJS entry point).
module.exports = OutputHash;