1 | const crypto = require('crypto');
|
2 | const fs = require('fs');
|
3 |
|
/**
 * OutputHash plugin constructor.
 *
 * @param {Object} [options]
 * @param {boolean} [options.validateOutput=false] - Whether to re-check the
 *     emitted assets' hashes after webpack writes them to disk.
 * @param {RegExp} [options.validateOutputRegex=/^.*$/] - Only asset names
 *     matching this pattern are validated.
 */
function OutputHash(options = {}) {
    const {
        validateOutput = false,
        validateOutputRegex = /^.*$/,
    } = options;

    this.validateOutput = validateOutput;
    this.validateOutputRegex = validateOutputRegex;
}
|
11 |
|
12 |
|
13 |
|
14 |
|
/**
 * Replaces every occurrence of `source` with `target` inside an asset,
 * recursing into composed assets.
 *
 * @param {string|Object} asset - A raw string, or an asset object whose shape
 *     is detected by its private fields (`_source`, `_cachedSource`, `_value`,
 *     `children`) — these look like webpack source classes (ReplaceSource,
 *     CachedSource, RawSource, ConcatSource); confirm against the webpack
 *     version in use.
 * @param {string} source - The literal text to replace (e.g. an old hash).
 * @param {string} target - The replacement text (e.g. a new hash).
 * @returns {string|Object} The replaced string, or the mutated asset.
 * @throws {Error} If the asset shape is not recognised.
 */
function replaceStringInAsset(asset, source, target) {
    // Escape regex metacharacters so `source` is matched literally.
    // Hashes are usually hex, but a base64 hashDigest can contain '+' or '/',
    // which would otherwise be interpreted as regex syntax.
    const escaped = source.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const sourceRE = new RegExp(escaped, 'g');

    if (typeof asset === 'string') {
        return asset.replace(sourceRE, target);
    }

    // Wrapped source: recurse into the inner source object.
    if ('_source' in asset) {
        asset._source = replaceStringInAsset(asset._source, source, target);
        return asset;
    }

    // Cached rendering: rewrite the cached string in place.
    if ('_cachedSource' in asset) {
        asset._cachedSource = asset.source().replace(sourceRE, target);
        return asset;
    }

    // Raw value: rewrite the stored string.
    if ('_value' in asset) {
        asset._value = asset.source().replace(sourceRE, target);
        return asset;
    }

    // Concatenated asset: rewrite every child recursively.
    if ('children' in asset) {
        asset.children = asset.children.map(child => replaceStringInAsset(child, source, target));
        return asset;
    }

    throw new Error(`Unknown asset type (${asset.constructor.name})!. ` +
        'Unfortunately this type of asset is not supported yet. ' +
        'Please raise an issue and we will look into it asap');
}
|
50 |
|
51 |
|
52 |
|
53 |
|
54 |
|
55 |
|
56 |
|
/**
 * Recomputes the hash of a chunk's primary asset from its final content and
 * renames the asset (and chunk bookkeeping) to match.
 *
 * Mutates `chunk` (hash, renderedHash, files[0]) and `assets` (re-keys the
 * primary asset, patches the old hash out of the chunk's secondary files).
 *
 * @param {Object} chunk - Chunk with `renderedHash` and a `files` list whose
 *     first entry is the primary asset's name.
 * @param {Object} assets - Map of asset name to asset object.
 * @param {Function} hashFn - Returns `{ fullHash, shortHash }` for a string.
 * @returns {{oldHash: string, newHash: string}} The rename that was applied.
 */
function reHashChunk(chunk, assets, hashFn) {
    const previousHash = chunk.renderedHash;
    const [primaryFile] = chunk.files;
    const primaryAsset = assets[primaryFile];

    // Hash the rendered content and derive the new file name from it.
    const { fullHash, shortHash } = hashFn(primaryAsset.source());
    const renamedFile = primaryFile.replace(previousHash, shortHash);

    chunk.hash = fullHash;
    chunk.renderedHash = shortHash;
    chunk.files[0] = renamedFile;

    // Re-key the asset under its new name.
    primaryAsset._name = renamedFile;
    delete assets[primaryFile];
    assets[renamedFile] = primaryAsset;

    // Patch the stale hash out of the chunk's secondary files, if any.
    for (const secondaryFile of chunk.files.slice(1)) {
        replaceStringInAsset(assets[secondaryFile], previousHash, shortHash);
    }

    return { oldHash: previousHash, newHash: shortHash };
}
|
85 |
|
86 |
|
87 |
|
88 |
|
89 |
|
90 |
|
91 |
|
92 |
|
/**
 * Rewrites every known old hash to its new hash inside all of a chunk's files.
 *
 * @param {Object} chunk - Chunk whose `files` list names the assets to patch.
 * @param {Object} assets - Map of asset name to asset object.
 * @param {Object} oldHashToNewHashMap - Mapping of old hash → new hash.
 */
function replaceOldHashForNewInChunkFiles(chunk, assets, oldHashToNewHashMap) {
    for (const file of chunk.files) {
        for (const [oldHash, newHash] of Object.entries(oldHashToNewHashMap)) {
            replaceStringInAsset(assets[file], oldHash, newHash);
        }
    }
}
|
101 |
|
/**
 * Webpack plugin entry point: re-hashes each chunk's emitted asset so its
 * file-name hash reflects the final content, and (optionally) re-validates
 * the files after they are written to disk.
 *
 * @param {Object} compiler - The webpack compiler instance.
 */
OutputHash.prototype.apply = function apply(compiler) {
    // Built once per compilation from the compilation's output options;
    // also reused by the afterEmit validation hook below.
    let hashFn;

    compiler.hooks.compilation.tap('OutputHash', (compilation) => {
        const { outputOptions } = compilation;
        const {
            hashFunction, hashDigest, hashDigestLength, hashSalt,
        } = outputOptions;

        // Hashes `input` with the compilation's configured hash settings and
        // returns both the full digest and the truncated form used in names.
        // NOTE(review): the salt is mixed in *after* the content here —
        // confirm this matches how webpack itself applies hashSalt.
        hashFn = (input) => {
            const hashObj = crypto.createHash(hashFunction).update(input);
            if (hashSalt) hashObj.update(hashSalt);
            const fullHash = hashObj.digest(hashDigest);
            return { fullHash, shortHash: fullHash.substr(0, hashDigestLength) };
        };

        // Capture the chunk graph once chunk optimization has finished so the
        // asset-update hook below can traverse it.
        compilation.hooks.afterOptimizeChunks.tap('Capture chunks', (chunks, chunkGroups) => {
            this.chunks = chunks;
            this.chunkGroups = chunkGroups;
        });

        compilation.hooks.afterOptimizeAssets.tap('Update chunks', (assets) => {
            const sortedChunks = [];
            const visitedGroups = [];
            const nameMap = {};

            // Depth-first walk of the chunk-group graph. Child groups are
            // recursed into before the group's own chunks are queued, so
            // chunks end up in sortedChunks children-first.
            const extractInOrder = (group) => {
                // Mark as visited up front so cycles in the group graph
                // cannot cause infinite recursion.
                visitedGroups.push(group);

                group.getChildren().forEach((child) => {
                    if (!visitedGroups.includes(child)) extractInOrder(child);
                });

                group.chunks.forEach((chunk) => {
                    // Make sure every group this chunk belongs to has been
                    // walked before the chunk itself is queued.
                    Array.from(chunk.groupsIterable).forEach((parentGroup) => {
                        if (!visitedGroups.includes(parentGroup)) extractInOrder(parentGroup);
                    });
                    if (!sortedChunks.includes(chunk)) sortedChunks.push(chunk);
                });
            };

            this.chunkGroups.forEach(extractInOrder);

            sortedChunks.forEach((chunk) => {
                // First patch any hashes renamed by earlier iterations into
                // this chunk's files, then re-hash this chunk's own content
                // and record the rename for chunks processed later.
                replaceOldHashForNewInChunkFiles(chunk, assets, nameMap);
                const { newHash, oldHash } = reHashChunk(chunk, assets, hashFn);
                nameMap[oldHash] = newHash;
            });
        });
    });

    if (this.validateOutput) {
        // After emit, re-read each matching asset from disk and check that
        // its name still contains the hash of its final content.
        compiler.hooks.afterEmit.tapAsync('Validate output', (compilation, callback) => {
            let err;
            Object.keys(compilation.assets)
                .filter(assetName => assetName.match(this.validateOutputRegex))
                .forEach((assetName) => {
                    const asset = compilation.assets[assetName];
                    const path = asset.existsAt;
                    const assetContent = fs.readFileSync(path, 'utf8');
                    const { shortHash } = hashFn(assetContent);
                    if (!assetName.includes(shortHash)) {
                        err = new Error(`The hash in ${assetName} does not match the hash of the content (${shortHash})`);
                    }
                });
            // NOTE(review): `err` is overwritten on each mismatch, so only
            // the last failing asset is reported to the callback.
            return callback(err);
        });
    }
};
|
183 |
|
// Expose the plugin constructor as this module's public API.
module.exports = OutputHash;
|