1 | const crypto = require('crypto');
|
2 | const fs = require('fs');
|
3 |
|
/**
 * Webpack plugin constructor. Holds the plugin configuration; the actual
 * work happens in `OutputHash.prototype.apply`.
 *
 * @param {Object} [options]
 * @param {string[]} [options.manifestFiles=[]] - Chunk names treated as manifests
 *     (re-hashed last, after all other chunks).
 * @param {boolean} [options.validateOutput=false] - Re-read emitted files and
 *     verify their names contain the hash of their content.
 * @param {RegExp} [options.validateOutputRegex=/^.*$/] - Limits which emitted
 *     assets are validated.
 */
function OutputHash(options = {}) {
    const {
        manifestFiles = [],
        validateOutput = false,
        validateOutputRegex = /^.*$/,
    } = options;

    Object.assign(this, { manifestFiles, validateOutput, validateOutputRegex });
}
|
13 |
|
14 |
|
15 |
|
16 |
|
/**
 * Replaces all occurrences of `source` with `target` inside an asset,
 * recursing into the internal representations webpack's Source classes use
 * (RawSource `_source`, CachedSource `_cachedSource`, OriginalSource `_value`,
 * ConcatSource `children`).
 *
 * Fix: `source` is a literal hash string, not a regex pattern. With
 * `hashDigest: 'base64'` a hash can contain regex metacharacters such as `+`,
 * which previously made the replacement silently miss occurrences — so the
 * string is escaped before building the global RegExp.
 *
 * @param {string|Object} asset - A plain string or a webpack Source-like object.
 * @param {string} source - Literal substring to replace (typically an old hash).
 * @param {string} target - Replacement string (typically the new hash).
 * @returns {string|Object} The updated string, or the (mutated) asset object.
 * @throws {Error} If the asset's shape is not one of the supported Source types.
 */
function replaceStringInAsset(asset, source, target) {
    // Escape regex metacharacters so the hash is matched literally.
    const escapedSource = source.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const sourceRE = new RegExp(escapedSource, 'g');

    if (typeof asset === 'string') {
        return asset.replace(sourceRE, target);
    }

    // webpack 4 RawSource / SourceMapSource
    if ('_source' in asset) {
        asset._source = replaceStringInAsset(asset._source, source, target);
        return asset;
    }

    // webpack 4 CachedSource
    if ('_cachedSource' in asset) {
        asset._cachedSource = asset.source().replace(sourceRE, target);
        return asset;
    }

    // webpack 4 RawSource / OriginalSource
    if ('_value' in asset) {
        asset._value = asset.source().replace(sourceRE, target);
        return asset;
    }

    // webpack 4 ConcatSource: recurse into every child source.
    if ('children' in asset) {
        asset.children = asset.children.map(child => replaceStringInAsset(child, source, target));
        return asset;
    }

    throw new Error(`Unknown asset type (${asset.constructor.name})!. ` +
        'Unfortunately this type of asset is not supported yet. ' +
        'Please raise an issue and we will look into it asap');
}
|
52 |
|
53 |
|
54 |
|
55 |
|
56 |
|
57 |
|
58 |
|
/**
 * Re-hashes a single chunk: recomputes the hash from the final content of its
 * primary asset, renames that asset accordingly, and patches the old hash out
 * of the chunk's secondary files (e.g. source maps).
 *
 * Mutates `chunk` and `assets` in place.
 *
 * @param {Object} chunk - Webpack chunk (`renderedHash`, `hash`, `files`).
 * @param {Object} assets - Map of asset name -> asset object.
 * @param {Function} hashFn - Content hasher returning `{ fullHash, shortHash }`.
 * @returns {{oldHash: string, newHash: string}} The hash substitution performed.
 */
function reHashChunk(chunk, assets, hashFn) {
    const previousHash = chunk.renderedHash;
    const previousName = chunk.files[0];
    const mainAsset = assets[previousName];

    // Hash the asset's *final* content and derive the new file name from it.
    const { fullHash, shortHash } = hashFn(mainAsset.source());
    const updatedName = previousName.replace(previousHash, shortHash);

    // Update the chunk's bookkeeping to reflect the recomputed hash.
    chunk.hash = fullHash;
    chunk.renderedHash = shortHash;
    chunk.files[0] = updatedName;

    // Re-key the primary asset under its new name.
    mainAsset._name = updatedName;
    delete assets[previousName];
    assets[updatedName] = mainAsset;

    // Secondary files still mention the old hash inside their content.
    chunk.files
        .slice(1)
        .forEach(fileName => replaceStringInAsset(assets[fileName], previousHash, shortHash));

    return { oldHash: previousHash, newHash: shortHash };
}
|
87 |
|
88 |
|
89 |
|
90 |
|
91 |
|
92 |
|
93 |
|
94 |
|
/**
 * Rewrites every already-known old hash to its new value inside all of a
 * chunk's asset files. Used so a chunk that references other (already
 * re-hashed) chunks picks up their new names before being hashed itself.
 *
 * @param {Object} chunk - Webpack chunk whose `files` are patched.
 * @param {Object} assets - Map of asset name -> asset object (mutated in place).
 * @param {Object<string,string>} oldHashToNewHashMap - oldHash -> newHash.
 */
function replaceOldHashForNewInChunkFiles(chunk, assets, oldHashToNewHashMap) {
    for (const file of chunk.files) {
        for (const [oldHash, newHash] of Object.entries(oldHashToNewHashMap)) {
            replaceStringInAsset(assets[file], oldHash, newHash);
        }
    }
}
|
103 |
|
/**
 * Recursively flattens arbitrarily nested arrays into a single flat array.
 * Non-array input is returned unchanged.
 *
 * @param {*} arr - Value to flatten.
 * @returns {*} A flat array, or the input itself when it is not an array.
 */
function flatten(arr) {
    if (!Array.isArray(arr)) return arr;
    const flat = [];
    for (const item of arr) {
        if (Array.isArray(item)) {
            flat.push(...flatten(item));
        } else {
            flat.push(item);
        }
    }
    return flat;
}
|
108 |
|
/**
 * Walks the chunk-group parent graph upwards, accumulating the chunks of every
 * ancestor group into `parents`. Chunks are pushed as nested arrays — callers
 * flatten the result afterwards. NOTE(review): because `parents` holds arrays
 * (not chunks) during the walk, the `!parents.includes(chunk)` filter never
 * actually excludes anything; duplicates are tolerated downstream, so the
 * original behavior is preserved here.
 *
 * @param {Object} chunkGroup - Group whose ancestors are collected.
 * @param {Array} parents - Accumulator (mutated in place; nested arrays).
 * @param {Array} visitedGroups - Groups already expanded (cycle guard).
 */
function getAllParents(chunkGroup, parents, visitedGroups) {
    // The group graph can contain cycles; never expand a group twice.
    if (visitedGroups.indexOf(chunkGroup) !== -1) return;
    visitedGroups.push(chunkGroup);

    for (const parentGroup of chunkGroup.getParents()) {
        const chunksToAdd = parentGroup.chunks.filter(chunk => !parents.includes(chunk));
        parents.push(chunksToAdd);
        getAllParents(parentGroup, parents, visitedGroups);
    }
}
|
118 |
|
/**
 * Webpack plugin entry point. Hooks into the compilation to:
 *   1. capture the chunk graph after chunk optimization,
 *   2. after asset optimization, re-hash every chunk from its *final* asset
 *      content, in dependency order (children before the chunks that
 *      reference them), patching each new hash into referencing chunks,
 *   3. optionally validate that the emitted files' names match the hash of
 *      their on-disk content.
 *
 * @param {Object} compiler - Webpack compiler (webpack 4 hooks API, judging
 *     by `compiler.hooks.*.tap` — TODO confirm supported webpack versions).
 */
OutputHash.prototype.apply = function apply(compiler) {
    let hashFn;

    compiler.hooks.compilation.tap('OutputHash', (compilation) => {
        const { outputOptions } = compilation;
        const {
            hashFunction, hashDigest, hashDigestLength, hashSalt,
        } = outputOptions;

        // Build a hasher that mirrors webpack's own output hashing settings,
        // so the recomputed hashes are formatted like the originals.
        // NOTE(review): the salt is appended *after* the content here — assumed
        // to match webpack's own salting order; verify if hashes ever diverge.
        hashFn = (input) => {
            const hashObj = crypto.createHash(hashFunction).update(input);
            if (hashSalt) hashObj.update(hashSalt);
            const fullHash = hashObj.digest(hashDigest);
            return { fullHash, shortHash: fullHash.substr(0, hashDigestLength) };
        };

        // Capture the chunk graph once chunks are final; it is consumed below
        // when assets are re-hashed.
        compilation.hooks.afterOptimizeChunks.tap('Capture chunks', (chunks, chunkGroups) => {
            this.chunks = chunks;
            this.chunkGroups = chunkGroups;
        });

        compilation.hooks.afterOptimizeAssets.tap('Update chunks', (assets) => {
            // Manifest chunks are excluded here and re-hashed last, because
            // they embed the names of every other chunk.
            const nonManifestChunks = this.chunks.filter(chunk =>
                !this.manifestFiles.includes(chunk.name));

            const chunksByDependency = [];

            // Topological sort by parent relationship: repeatedly pull out any
            // chunk all of whose parents are already sorted (or not subject to
            // sorting). Outer loop repeats until every chunk has been placed.
            while (nonManifestChunks.length) {
                let i = 0;

                while (i < nonManifestChunks.length) {
                    const currentChunk = nonManifestChunks[i];

                    // Collect every ancestor chunk of this chunk (excluding itself).
                    let parents = [];
                    Array.from(currentChunk.groupsIterable)
                        .forEach(group => getAllParents(group, parents, []));
                    parents = flatten(parents).filter(parent => parent !== currentChunk);

                    const hasNoParent = !parents || parents.length === 0;
                    // Compare chunks by stringified id: ids may be numbers or strings.
                    const containsChunk = (chunkList, chunk) =>
                        chunkList.map(c => String(c.id)).indexOf(String(chunk.id)) !== -1;

                    // A parent is "accounted for" when it has already been sorted,
                    // or was never part of the set being sorted (e.g. a manifest).
                    const isParentAccountedFor = p =>
                        containsChunk(chunksByDependency, p)
                        || !containsChunk(nonManifestChunks, p);

                    if (hasNoParent || parents.every(isParentAccountedFor)) {
                        chunksByDependency.push(currentChunk);
                        nonManifestChunks.splice(i, 1);
                    } else {
                        i += 1;
                    }
                }
            }

            // Process most-depended-upon chunks first so that by the time a
            // chunk is hashed, the names it references are already final.
            const chunksByDependencyDesc = chunksByDependency.reverse();

            // oldHash -> newHash substitutions accumulated so far.
            const nameMap = {};

            chunksByDependencyDesc.forEach((chunk) => {
                // First patch in the new hashes of previously processed chunks,
                // then re-hash this chunk's (now final) content.
                replaceOldHashForNewInChunkFiles(chunk, assets, nameMap);
                const { newHash, oldHash } = reHashChunk(chunk, assets, hashFn);
                nameMap[oldHash] = newHash;
            });

            // Manifest chunks go last: they reference everything else, so they
            // need the complete nameMap before being re-hashed themselves.
            this.chunks
                .filter(chunk => this.manifestFiles.includes(chunk.name))
                .forEach((chunk) => {
                    replaceOldHashForNewInChunkFiles(chunk, assets, nameMap);
                    reHashChunk(chunk, assets, hashFn);
                });
        });
    });

    if (this.validateOutput) {
        // After emit, re-read each matching file from disk and check that its
        // name still contains the hash of its actual content. Only the last
        // mismatch is reported (err is overwritten on each failure).
        compiler.hooks.afterEmit.tapAsync('Validate output', (compilation, callback) => {
            let err;
            Object.keys(compilation.assets)
                .filter(assetName => assetName.match(this.validateOutputRegex))
                .forEach((assetName) => {
                    const asset = compilation.assets[assetName];
                    const path = asset.existsAt;
                    const assetContent = fs.readFileSync(path, 'utf8');
                    const { shortHash } = hashFn(assetContent);
                    if (!assetName.includes(shortHash)) {
                        err = new Error(`The hash in ${assetName} does not match the hash of the content (${shortHash})`);
                    }
                });
            return callback(err);
        });
    }
};
|
226 |
|
// Expose the plugin constructor for webpack configs.
module.exports = OutputHash;
|