// AVA snapshot manager: decodes, records and encodes `.snap` files and their
// accompanying Markdown reports.
'use strict';

const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');

const concordance = require('concordance');
const indentString = require('indent-string');
const md5Hex = require('md5-hex');
const convertSourceMap = require('convert-source-map');
const slash = require('slash');
const writeFileAtomic = require('write-file-atomic');
const mem = require('mem');

const concordanceOptions = require('./concordance-options').snapshotManager;

// Increment if encoding layout or Concordance serialization versions change. Previous AVA versions will not be able to
// decode buffers generated by a newer version, so changing this value will require a major version bump of AVA itself.
// The version is encoded as an unsigned 16 bit integer.
const VERSION = 2;

const VERSION_HEADER = Buffer.alloc(2);
VERSION_HEADER.writeUInt16LE(VERSION);

// The decoder matches on the trailing newline byte (0x0A).
const READABLE_PREFIX = Buffer.from(`AVA Snapshot v${VERSION}\n`, 'ascii');
const REPORT_SEPARATOR = Buffer.from('\n\n', 'ascii');
const REPORT_TRAILING_NEWLINE = Buffer.from('\n', 'ascii');

// Byte length of an MD5 digest; used for both entry hashes and the file checksum.
const MD5_HASH_LENGTH = 16;

/**
 * Base error for snapshot file problems. Carries the path of the offending
 * `.snap` file so callers can point at it when reporting.
 */
class SnapshotError extends Error {
  constructor(message, snapPath) {
    super(message);
    this.name = 'SnapshotError';
    this.snapPath = snapPath;
  }
}
exports.SnapshotError = SnapshotError;

/**
 * Thrown when the MD5 checksum stored in a snapshot file does not match its
 * compressed body, i.e. the file is corrupted or was modified.
 */
class ChecksumError extends SnapshotError {
  constructor(snapPath) {
    super('Checksum mismatch', snapPath);
    this.name = 'ChecksumError';
  }
}
exports.ChecksumError = ChecksumError;

/**
 * Thrown when a snapshot file was written with a different encoding version.
 * Exposes both the found (`snapVersion`) and supported (`expectedVersion`)
 * versions for error reporting.
 */
class VersionMismatchError extends SnapshotError {
  constructor(snapPath, version) {
    super('Unexpected snapshot version', snapPath);
    this.name = 'VersionMismatchError';
    this.snapVersion = version;
    this.expectedVersion = VERSION;
  }
}
exports.VersionMismatchError = VersionMismatchError;

// Jest-style snapshot files begin with this header; AVA cannot decode them.
const LEGACY_SNAPSHOT_HEADER = Buffer.from('// Jest Snapshot v1');

// Does `buffer` look like a legacy (Jest v1) snapshot file?
function isLegacySnapshot(buffer) {
  // `subarray()` clamps, so buffers shorter than the header compare unequal.
  return LEGACY_SNAPSHOT_HEADER.equals(buffer.subarray(0, LEGACY_SNAPSHOT_HEADER.byteLength));
}

/**
 * Thrown when a legacy (Jest v1) snapshot file is encountered; the user must
 * regenerate their snapshots.
 */
class LegacyError extends SnapshotError {
  constructor(snapPath) {
    super('Legacy snapshot file', snapPath);
    this.name = 'LegacyError';
  }
}
exports.LegacyError = LegacyError;

/**
 * Synchronously read `file`, returning its contents as a Buffer, or `null`
 * when the file does not exist. Any other filesystem error is rethrown.
 */
function tryRead(file) {
  try {
    return fs.readFileSync(file);
  } catch (error) {
    // A missing file is an expected condition; anything else is fatal.
    if (error.code !== 'ENOENT') {
      throw error;
    }

    return null;
  }
}

/**
 * Strip trailing LF (0x0A) and CR (0x0D) bytes from `buffer`, returning a
 * view onto the remaining bytes. Empty and all-newline buffers yield an
 * empty view.
 */
function withoutLineEndings(buffer) {
  let end = buffer.byteLength;
  while (end > 0 && (buffer[end - 1] === 0x0A || buffer[end - 1] === 0x0D)) {
    end--;
  }

  return buffer.slice(0, end);
}

/**
 * Format a single report entry: an optional `> label` line followed by the
 * Concordance-rendered snapshot, indented four spaces (a Markdown code block).
 *
 * @param {string|undefined} label - Optional user-supplied label.
 * @param {object} descriptor - Concordance descriptor of the snapshotted value.
 * @returns {Buffer} UTF-8 encoded report fragment.
 */
function formatEntry(label, descriptor) {
  // Normalize falsy labels to the empty string: the previous concatenation
  // stringified `undefined`/`null` labels into the report output.
  label = label ? `> ${label}\n\n` : '';

  const codeBlock = indentString(concordance.formatDescriptor(descriptor, concordanceOptions), 4);
  return Buffer.from(label + codeBlock, 'utf8');
}

/**
 * Render `entries` (a Map of key → array of formatted entry Buffers) into the
 * list of buffers making up the report body, along with their total byte
 * length. Keys are emitted in sorted order; entries under the same key are
 * separated by a blank line.
 *
 * @param {Map<string, Buffer[]>} entries
 * @returns {{buffers: Buffer[], byteLength: number}}
 */
function combineEntries(entries) {
  const buffers = [];
  let byteLength = 0;
  const append = buffer => {
    buffers.push(buffer);
    byteLength += buffer.byteLength;
  };

  for (const key of [...entries.keys()].sort()) {
    append(Buffer.from(`\n\n## ${key}\n\n`, 'utf8'));

    // Separate entries by index rather than comparing against the last
    // element by object identity, which would misbehave if the same buffer
    // object appeared more than once.
    entries.get(key).forEach((entry, index) => {
      if (index > 0) {
        append(REPORT_SEPARATOR);
      }

      append(entry);
    });
  }

  return {buffers, byteLength};
}

/**
 * Build a complete Markdown report for the snapshot file.
 *
 * @param {string} relFile - Test file path, relative to the project.
 * @param {string} snapFile - Basename of the `.snap` file.
 * @param {Map<string, Buffer[]>} entries - Report entries, keyed by test title.
 * @returns {Buffer}
 */
function generateReport(relFile, snapFile, entries) {
  const {buffers, byteLength} = combineEntries(entries);

  const header = Buffer.from(`# Snapshot report for \`${slash(relFile)}\`

The actual snapshot is saved in \`${snapFile}\`.

Generated by [AVA](https://avajs.dev).`, 'utf8');

  return Buffer.concat(
    [header, ...buffers, REPORT_TRAILING_NEWLINE],
    header.byteLength + byteLength + REPORT_TRAILING_NEWLINE.byteLength
  );
}

/**
 * Append freshly recorded entries to an existing Markdown report, keeping the
 * existing content minus its trailing line endings.
 *
 * @param {Buffer} existingReport - Current report file contents.
 * @param {Map<string, Buffer[]>} entries - New report entries.
 * @returns {Buffer}
 */
function appendReportEntries(existingReport, entries) {
  const {buffers, byteLength} = combineEntries(entries);
  const prepend = withoutLineEndings(existingReport);

  return Buffer.concat(
    [prepend, ...buffers, REPORT_TRAILING_NEWLINE],
    prepend.byteLength + byteLength + REPORT_TRAILING_NEWLINE.byteLength
  );
}

/**
 * Serialize snapshots into the binary `.snap` layout:
 *
 *   readable prefix · version (uint16 LE) · MD5 of compressed body · gzipped body
 *
 * The body is a header (counts and a pointer table) followed by the
 * concatenated snapshot buffers.
 *
 * @param {Map<string, Buffer[]>} buffersByHash - Snapshot buffers keyed by MD5 hex hash.
 * @returns {Buffer}
 */
function encodeSnapshots(buffersByHash) {
  const buffers = [];
  let byteOffset = 0;

  // Entry start and end pointers are relative to the header length. This means
  // it's possible to append new entries to an existing snapshot file, without
  // having to rewrite pointers for existing entries. Reserve the slot now and
  // backfill it once the header size is known.
  const headerLength = Buffer.alloc(4);
  buffers.push(headerLength);
  byteOffset += 4;

  // Allows 65535 hashes (tests or identified snapshots) per file.
  const numberHashes = Buffer.alloc(2);
  numberHashes.writeUInt16LE(buffersByHash.size);
  buffers.push(numberHashes);
  byteOffset += 2;

  const entries = [];
  for (const [hash, snapshotBuffers] of buffersByHash) {
    buffers.push(Buffer.from(hash, 'hex'));
    byteOffset += MD5_HASH_LENGTH;

    // Allows 65535 snapshots per hash.
    const numberSnapshots = Buffer.alloc(2);
    numberSnapshots.writeUInt16LE(snapshotBuffers.length, 0);
    buffers.push(numberSnapshots);
    byteOffset += 2;

    for (const value of snapshotBuffers) {
      // Each pointer is 32 bits, restricting the total, uncompressed buffer
      // to 4 GiB.
      const start = Buffer.alloc(4);
      const end = Buffer.alloc(4);
      entries.push({start, end, value});

      buffers.push(start, end);
      byteOffset += 8;
    }
  }

  headerLength.writeUInt32LE(byteOffset, 0);

  // Lay out the body and backfill each entry's pointers (header-relative).
  let bodyOffset = 0;
  for (const {start, end, value} of entries) {
    start.writeUInt32LE(bodyOffset, 0);
    end.writeUInt32LE(bodyOffset + value.byteLength, 0);
    buffers.push(value);
    bodyOffset += value.byteLength;
  }

  byteOffset += bodyOffset;

  const compressed = zlib.gzipSync(Buffer.concat(buffers, byteOffset));
  compressed[9] = 0x03; // Override the GZip header containing the OS to always be Linux
  const md5sum = crypto.createHash('md5').update(compressed).digest();
  return Buffer.concat(
    [READABLE_PREFIX, VERSION_HEADER, md5sum, compressed],
    READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + MD5_HASH_LENGTH + compressed.byteLength
  );
}

/**
 * Decode a `.snap` file produced by `encodeSnapshots()`.
 *
 * @param {Buffer} buffer - Raw file contents.
 * @param {string} snapPath - Path of the file, used for error reporting.
 * @returns {Map<string, Buffer[]>} Snapshot buffers keyed by MD5 hex hash.
 * @throws {LegacyError} For Jest v1 snapshot files.
 * @throws {VersionMismatchError} When written by a different encoding version.
 * @throws {ChecksumError} When the compressed body fails its MD5 check.
 */
function decodeSnapshots(buffer, snapPath) {
  if (isLegacySnapshot(buffer)) {
    throw new LegacyError(snapPath);
  }

  // The version starts after the readable prefix, which is ended by a newline
  // byte (0x0A).
  const versionOffset = buffer.indexOf(0x0A) + 1;
  const version = buffer.readUInt16LE(versionOffset);
  if (version !== VERSION) {
    throw new VersionMismatchError(snapPath, version);
  }

  const md5sumOffset = versionOffset + 2;
  const compressedOffset = md5sumOffset + MD5_HASH_LENGTH;
  const compressed = buffer.slice(compressedOffset);

  // Verify integrity before attempting to decompress.
  const expectedSum = buffer.slice(md5sumOffset, compressedOffset);
  const md5sum = crypto.createHash('md5').update(compressed).digest();
  if (!md5sum.equals(expectedSum)) {
    throw new ChecksumError(snapPath);
  }

  const decompressed = zlib.gunzipSync(compressed);

  // Cursor-style readers over the decompressed header.
  let byteOffset = 0;
  const readUInt16 = () => {
    const value = decompressed.readUInt16LE(byteOffset);
    byteOffset += 2;
    return value;
  };

  const readUInt32 = () => {
    const value = decompressed.readUInt32LE(byteOffset);
    byteOffset += 4;
    return value;
  };

  // Entry pointers in the table are relative to the end of the header.
  const headerLength = readUInt32();

  const snapshotsByHash = new Map();
  const numberHashes = readUInt16();

  for (let count = 0; count < numberHashes; count++) {
    const hash = decompressed.toString('hex', byteOffset, byteOffset + MD5_HASH_LENGTH);
    byteOffset += MD5_HASH_LENGTH;

    const numberSnapshots = readUInt16();
    const snapshotsBuffers = new Array(numberSnapshots);
    for (let index = 0; index < numberSnapshots; index++) {
      const start = readUInt32() + headerLength;
      const end = readUInt32() + headerLength;
      snapshotsBuffers[index] = decompressed.slice(start, end);
    }

    // Allow for new entries to be appended to an existing header, which could
    // lead to the same hash being present multiple times.
    if (snapshotsByHash.has(hash)) {
      snapshotsByHash.set(hash, snapshotsByHash.get(hash).concat(snapshotsBuffers));
    } else {
      snapshotsByHash.set(hash, snapshotsBuffers);
    }
  }

  return snapshotsByHash;
}

/**
 * Tracks snapshots for a single test file: comparing values against recorded
 * snapshots, recording new ones, and saving the `.snap` file plus its
 * Markdown report.
 */
class Manager {
  constructor(options) {
    this.appendOnly = options.appendOnly;
    this.dir = options.dir;
    this.recordNewSnapshots = options.recordNewSnapshots;
    this.relFile = options.relFile;
    this.reportFile = options.reportFile;
    this.snapFile = options.snapFile;
    this.snapPath = options.snapPath;
    this.snapshotsByHash = options.snapshotsByHash;

    this.hasChanges = false;
    this.reportEntries = new Map();
  }

  /**
   * Compare `options.expected` against the snapshot stored for
   * `options.belongsTo` at `options.index`, recording it when missing and
   * recording is enabled.
   */
  compare(options) {
    const hash = md5Hex(options.belongsTo);
    const entries = this.snapshotsByHash.get(hash) || [];
    const snapshotBuffer = entries[options.index];

    if (!snapshotBuffer) {
      if (!this.recordNewSnapshots) {
        return {pass: false};
      }

      if (options.deferRecording) {
        return {pass: true, record: this.deferRecord(hash, options)};
      }

      this.record(hash, options);
      return {pass: true};
    }

    const actual = concordance.deserialize(snapshotBuffer, concordanceOptions);
    const expected = concordance.describe(options.expected, concordanceOptions);
    return {actual, expected, pass: concordance.compareDescriptors(actual, expected)};
  }

  /**
   * Serialize the value now, but return a callback that commits it. The
   * callbacks must be invoked in recording order so indexes line up.
   */
  deferRecord(hash, options) {
    const descriptor = concordance.describe(options.expected, concordanceOptions);
    const snapshot = concordance.serialize(descriptor);
    const entry = formatEntry(options.label, descriptor);

    return () => { // Must be called in order!
      this.hasChanges = true;

      let snapshots = this.snapshotsByHash.get(hash);
      if (!snapshots) {
        snapshots = [];
        this.snapshotsByHash.set(hash, snapshots);
      }

      if (options.index > snapshots.length) {
        throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, exceeds expected index of ${snapshots.length}`);
      }

      if (options.index < snapshots.length) {
        throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, already exists`);
      }

      snapshots.push(snapshot);

      if (this.reportEntries.has(options.belongsTo)) {
        this.reportEntries.get(options.belongsTo).push(entry);
      } else {
        this.reportEntries.set(options.belongsTo, [entry]);
      }
    };
  }

  // Record immediately (the non-deferred path).
  record(hash, options) {
    this.deferRecord(hash, options)();
  }

  /**
   * Write the snapshot file and report atomically. Returns the touched paths
   * (including temporary files), or `null` when nothing changed.
   */
  save() {
    if (!this.hasChanges) {
      return null;
    }

    const {snapPath} = this;
    const buffer = encodeSnapshots(this.snapshotsByHash);

    const reportPath = path.join(this.dir, this.reportFile);
    // In append-only mode, extend the existing report instead of regenerating.
    const existingReport = this.appendOnly ? tryRead(reportPath) : null;
    const reportBuffer = existingReport ?
      appendReportEntries(existingReport, this.reportEntries) :
      generateReport(this.relFile, this.snapFile, this.reportEntries);

    fs.mkdirSync(this.dir, {recursive: true});

    const paths = [snapPath, reportPath];
    const tmpfileCreated = tmpfile => paths.push(tmpfile);
    writeFileAtomic.sync(snapPath, buffer, {tmpfileCreated});
    writeFileAtomic.sync(reportPath, reportBuffer, {tmpfileCreated});
    return paths;
  }
}

// Resolve a compiled test file back to its original source via an inline or
// adjacent source map. Memoized: the result for a given file never changes
// within a run.
const resolveSourceFile = mem(file => {
  const testDir = path.dirname(file);
  const buffer = tryRead(file);
  if (!buffer) {
    return file; // Assume the file is stubbed in our test suite.
  }

  const source = buffer.toString();
  const converter = convertSourceMap.fromSource(source) || convertSourceMap.fromMapFileSource(source, testDir);
  if (!converter) {
    // No source map: the file is its own source.
    return file;
  }

  const map = converter.toObject();
  const firstSource = `${map.sourceRoot || ''}${map.sources[0]}`;
  return path.resolve(testDir, firstSource);
});

// Decide where snapshots for a test file live. Memoized per test file.
const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
  const testDir = path.dirname(resolveSourceFile(file));

  // An explicit location mirrors the test file's position within the project.
  if (fixedLocation) {
    return path.join(fixedLocation, path.relative(projectDir, testDir));
  }

  const parts = new Set(path.relative(projectDir, testDir).split(path.sep));
  if (parts.has('__tests__')) {
    return path.join(testDir, '__snapshots__');
  }

  if (parts.has('test') || parts.has('tests')) { // Accept tests, even though it's not in the default test patterns
    return path.join(testDir, 'snapshots');
  }

  return testDir;
}, {cacheKey: ([{file}]) => file});

exports.determineSnapshotDir = determineSnapshotDir;

/**
 * Load (or initialize) the snapshot Manager for a test file.
 *
 * When not updating, an existing `.snap` file is decoded and new snapshots
 * are appended to it; when updating, or when no file exists yet, everything
 * is written from scratch.
 */
function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
  const dir = determineSnapshotDir({file, fixedLocation, projectDir});
  const relFile = path.relative(projectDir, resolveSourceFile(file));
  const name = path.basename(relFile);
  const reportFile = `${name}.md`;
  const snapFile = `${name}.snap`;
  const snapPath = path.join(dir, snapFile);

  let appendOnly = !updating;
  let snapshotsByHash;

  if (!updating) {
    const buffer = tryRead(snapPath);
    if (buffer) {
      snapshotsByHash = decodeSnapshots(buffer, snapPath);
    } else {
      // Nothing on disk yet: start from scratch.
      appendOnly = false;
    }
  }

  return new Manager({
    appendOnly,
    dir,
    recordNewSnapshots,
    relFile,
    reportFile,
    snapFile,
    snapPath,
    snapshotsByHash: snapshotsByHash || new Map()
  });
}

exports.load = load;