'use strict';

const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const zlib = require('zlib');

const writeFileAtomic = require('@ava/write-file-atomic');
const concordance = require('concordance');
const indentString = require('indent-string');
const makeDir = require('make-dir');
const md5Hex = require('md5-hex');
const Buffer = require('safe-buffer').Buffer;

const concordanceOptions = require('./concordance-options').snapshotManager;

// Increment if encoding layout or Concordance serialization versions change. Previous AVA versions will not be able to
// decode buffers generated by a newer version, so changing this value will require a major version bump of AVA itself.
// The version is encoded as an unsigned 16 bit integer.
const VERSION = 1;

const VERSION_HEADER = Buffer.alloc(2);
VERSION_HEADER.writeUInt16LE(VERSION);

// The decoder matches on the trailing newline byte (0x0A).
const READABLE_PREFIX = Buffer.from(`AVA Snapshot v${VERSION}\n`, 'ascii');
const REPORT_SEPARATOR = Buffer.from('\n\n', 'ascii');
const REPORT_TRAILING_NEWLINE = Buffer.from('\n', 'ascii');

const MD5_HASH_LENGTH = 16;

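// On-disk layout of a `.snap` file, as produced by encodeSnapshots() below
// (summarized from this file's code, not an external spec):
//
//   "AVA Snapshot v1\n"  readable ASCII prefix, terminated by 0x0A
//   uint16le             encoding version (VERSION)
//   16 bytes             MD5 digest of the compressed payload
//   gzip(...)            compressed header and snapshot bodies
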
class SnapshotError extends Error {
	constructor(message, snapPath) {
		super(message);
		this.name = 'SnapshotError';
		this.snapPath = snapPath;
	}
}
exports.SnapshotError = SnapshotError;

class ChecksumError extends SnapshotError {
	constructor(snapPath) {
		super('Checksum mismatch', snapPath);
		this.name = 'ChecksumError';
	}
}
exports.ChecksumError = ChecksumError;

class VersionMismatchError extends SnapshotError {
	constructor(snapPath, version) {
		super('Unexpected snapshot version', snapPath);
		this.name = 'VersionMismatchError';
		this.snapVersion = version;
		this.expectedVersion = VERSION;
	}
}
exports.VersionMismatchError = VersionMismatchError;

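// ChecksumError and VersionMismatchError above, and LegacyError below, all
// extend SnapshotError, so callers can catch the base class to handle any
// unusable snapshot file.
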
const LEGACY_SNAPSHOT_HEADER = Buffer.from('// Jest Snapshot v1');
function isLegacySnapshot(buffer) {
	return LEGACY_SNAPSHOT_HEADER.equals(buffer.slice(0, LEGACY_SNAPSHOT_HEADER.byteLength));
}

class LegacyError extends SnapshotError {
	constructor(snapPath) {
		super('Legacy snapshot file', snapPath);
		this.name = 'LegacyError';
	}
}
exports.LegacyError = LegacyError;

function tryRead(file) {
	try {
		return fs.readFileSync(file);
	} catch (err) {
		if (err.code === 'ENOENT') {
			return null;
		}

		throw err;
	}
}

function withoutLineEndings(buffer) {
	let newLength = buffer.byteLength - 1;
	while (buffer[newLength] === 0x0A || buffer[newLength] === 0x0D) {
		newLength--;
	}

	// `newLength` now indexes the last byte that is not a line ending; slice
	// end offsets are exclusive, so add 1 to keep that byte.
	return buffer.slice(0, newLength + 1);
}

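// For example (illustrative):
//
//   withoutLineEndings(Buffer.from('report\n\r\n'))
//   // => <Buffer> containing 'report'
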
function formatEntry(label, descriptor) {
	if (label) {
		label = `> ${label}\n\n`;
	} else {
		// Without this fallback, `label + codeBlock` would stringify `undefined`.
		label = '';
	}

	const codeBlock = indentString(concordance.formatDescriptor(descriptor, concordanceOptions), 4);
	return Buffer.from(label + codeBlock, 'utf8');
}

function combineEntries(entries) {
	const buffers = [];
	let byteLength = 0;

	// Sort keys so the report is deterministic regardless of insertion order.
	const sortedKeys = Array.from(entries.keys()).sort();
	for (const key of sortedKeys) {
		const keyBuffer = Buffer.from(`\n\n## ${key}\n\n`, 'utf8');
		buffers.push(keyBuffer);
		byteLength += keyBuffer.byteLength;

		const formattedEntries = entries.get(key);
		const last = formattedEntries[formattedEntries.length - 1];
		for (const entry of formattedEntries) {
			buffers.push(entry);
			byteLength += entry.byteLength;

			if (entry !== last) {
				buffers.push(REPORT_SEPARATOR);
				byteLength += REPORT_SEPARATOR.byteLength;
			}
		}
	}

	return {buffers, byteLength};
}

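// Shape of the Markdown report that generateReport() below assembles from
// these entries (values are illustrative):
//
//   # Snapshot report for `test/foo.js`
//
//   The actual snapshot is saved in `foo.js.snap`.
//
//   Generated by [AVA](https://ava.li).
//
//   ## my test
//
//   > my label
//
//       {
//         answer: 42,
//       }
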
function generateReport(relFile, snapFile, entries) {
	const combined = combineEntries(entries);
	const buffers = combined.buffers;
	let byteLength = combined.byteLength;

	const header = Buffer.from(`# Snapshot report for \`${relFile}\`

The actual snapshot is saved in \`${snapFile}\`.

Generated by [AVA](https://ava.li).`, 'utf8');
	buffers.unshift(header);
	byteLength += header.byteLength;

	buffers.push(REPORT_TRAILING_NEWLINE);
	byteLength += REPORT_TRAILING_NEWLINE.byteLength;
	return Buffer.concat(buffers, byteLength);
}

function appendReportEntries(existingReport, entries) {
	const combined = combineEntries(entries);
	const buffers = combined.buffers;
	let byteLength = combined.byteLength;

	const prepend = withoutLineEndings(existingReport);
	buffers.unshift(prepend);
	byteLength += prepend.byteLength;

	return Buffer.concat(buffers, byteLength);
}

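// combineEntries() prefixes every section with '\n\n', so trimming the
// existing report's trailing newlines before concatenating leaves exactly one
// blank line between the old content and each appended section.
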
function encodeSnapshots(buffersByHash) {
	const buffers = [];
	let byteOffset = 0;

	// Entry start and end pointers are relative to the header length. This means
	// it's possible to append new entries to an existing snapshot file, without
	// having to rewrite pointers for existing entries.
	const headerLength = Buffer.alloc(4);
	buffers.push(headerLength);
	byteOffset += 4;

	// Allows 65535 hashes (tests or identified snapshots) per file.
	const numHashes = Buffer.alloc(2);
	numHashes.writeUInt16LE(buffersByHash.size, 0);
	buffers.push(numHashes);
	byteOffset += 2;

	const entries = [];
	for (const pair of buffersByHash) {
		const hash = pair[0];
		const snapshotBuffers = pair[1];

		buffers.push(Buffer.from(hash, 'hex'));
		byteOffset += MD5_HASH_LENGTH;

		// Allows 65535 snapshots per hash.
		const numSnapshots = Buffer.alloc(2);
		numSnapshots.writeUInt16LE(snapshotBuffers.length, 0);
		buffers.push(numSnapshots);
		byteOffset += 2;

		for (const value of snapshotBuffers) {
			// Each pointer is 32 bits, restricting the total, uncompressed buffer to
			// 4 GiB.
			const start = Buffer.alloc(4);
			const end = Buffer.alloc(4);
			entries.push({start, end, value});

			buffers.push(start, end);
			byteOffset += 8;
		}
	}

	headerLength.writeUInt32LE(byteOffset, 0);

	let bodyOffset = 0;
	for (const entry of entries) {
		const start = bodyOffset;
		const end = bodyOffset + entry.value.byteLength;
		entry.start.writeUInt32LE(start, 0);
		entry.end.writeUInt32LE(end, 0);
		buffers.push(entry.value);
		bodyOffset = end;
	}
	byteOffset += bodyOffset;

	const compressed = zlib.gzipSync(Buffer.concat(buffers, byteOffset));
	const md5sum = crypto.createHash('md5').update(compressed).digest();
	return Buffer.concat([
		READABLE_PREFIX,
		VERSION_HEADER,
		md5sum,
		compressed
	], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + MD5_HASH_LENGTH + compressed.byteLength);
}

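// Layout of the decompressed payload (derived from encodeSnapshots() above):
//
//   uint32le                       header length in bytes, including this field
//   uint16le                       number of hashes
//   per hash:
//     16 bytes                     MD5 hash of the `belongsTo` identifier
//     uint16le                     number of snapshots for this hash
//     per snapshot: 2 x uint32le   start / end pointers, relative to the end
//                                  of the header
//   snapshot bodies, concatenated in pointer order
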
function decodeSnapshots(buffer, snapPath) {
	if (isLegacySnapshot(buffer)) {
		throw new LegacyError(snapPath);
	}

	// The version starts after the readable prefix, which is ended by a newline
	// byte (0x0A).
	const versionOffset = buffer.indexOf(0x0A) + 1;
	const version = buffer.readUInt16LE(versionOffset);
	if (version !== VERSION) {
		throw new VersionMismatchError(snapPath, version);
	}

	const md5sumOffset = versionOffset + 2;
	const compressedOffset = md5sumOffset + MD5_HASH_LENGTH;
	const compressed = buffer.slice(compressedOffset);

	const md5sum = crypto.createHash('md5').update(compressed).digest();
	const expectedSum = buffer.slice(md5sumOffset, compressedOffset);
	if (!md5sum.equals(expectedSum)) {
		throw new ChecksumError(snapPath);
	}

	const decompressed = zlib.gunzipSync(compressed);
	let byteOffset = 0;

	const headerLength = decompressed.readUInt32LE(byteOffset);
	byteOffset += 4;

	const snapshotsByHash = new Map();
	const numHashes = decompressed.readUInt16LE(byteOffset);
	byteOffset += 2;

	for (let count = 0; count < numHashes; count++) {
		const hash = decompressed.toString('hex', byteOffset, byteOffset + MD5_HASH_LENGTH);
		byteOffset += MD5_HASH_LENGTH;

		const numSnapshots = decompressed.readUInt16LE(byteOffset);
		byteOffset += 2;

		const snapshotsBuffers = new Array(numSnapshots);
		for (let index = 0; index < numSnapshots; index++) {
			const start = decompressed.readUInt32LE(byteOffset) + headerLength;
			byteOffset += 4;
			const end = decompressed.readUInt32LE(byteOffset) + headerLength;
			byteOffset += 4;
			snapshotsBuffers[index] = decompressed.slice(start, end);
		}

		// Allow for new entries to be appended to an existing header, which could
		// lead to the same hash being present multiple times.
		if (snapshotsByHash.has(hash)) {
			snapshotsByHash.set(hash, snapshotsByHash.get(hash).concat(snapshotsBuffers));
		} else {
			snapshotsByHash.set(hash, snapshotsBuffers);
		}
	}

	return snapshotsByHash;
}

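// encodeSnapshots() and decodeSnapshots() are inverses. A minimal round trip,
// with hypothetical values:
//
//   const map = new Map([[md5Hex('my test'), [Buffer.from('<serialized>')]]]);
//   const encoded = encodeSnapshots(map);
//   const decoded = decodeSnapshots(encoded, '/tmp/foo.js.snap');
//   // `decoded` holds the same hashes and buffer contents as `map`
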
class Manager {
	constructor(options) {
		this.appendOnly = options.appendOnly;
		this.dir = options.dir;
		this.relFile = options.relFile;
		this.reportFile = options.reportFile;
		this.snapFile = options.snapFile;
		this.snapPath = options.snapPath;
		this.snapshotsByHash = options.snapshotsByHash;

		this.hasChanges = false;
		this.reportEntries = new Map();
	}

	compare(options) {
		const hash = md5Hex(options.belongsTo);
		const entries = this.snapshotsByHash.get(hash) || [];
		if (options.index > entries.length) {
			throw new RangeError(`Cannot record snapshot ${options.index} for ${JSON.stringify(options.belongsTo)}, exceeds expected index of ${entries.length}`);
		}

		// An index just past the end means this snapshot hasn't been recorded
		// yet: record it now and report a pass.
		if (options.index === entries.length) {
			this.record(hash, options);
			return {pass: true};
		}

		const snapshotBuffer = entries[options.index];
		const actual = concordance.deserialize(snapshotBuffer, concordanceOptions);

		const expected = concordance.describe(options.expected, concordanceOptions);
		const pass = concordance.compareDescriptors(actual, expected);

		return {actual, expected, pass};
	}

	record(hash, options) {
		const descriptor = concordance.describe(options.expected, concordanceOptions);

		this.hasChanges = true;
		const snapshot = concordance.serialize(descriptor);
		if (this.snapshotsByHash.has(hash)) {
			this.snapshotsByHash.get(hash).push(snapshot);
		} else {
			this.snapshotsByHash.set(hash, [snapshot]);
		}

		const entry = formatEntry(options.label, descriptor);
		if (this.reportEntries.has(options.belongsTo)) {
			this.reportEntries.get(options.belongsTo).push(entry);
		} else {
			this.reportEntries.set(options.belongsTo, [entry]);
		}
	}

	save() {
		if (!this.hasChanges) {
			return null;
		}

		const snapPath = this.snapPath;
		const buffer = encodeSnapshots(this.snapshotsByHash);

		const reportPath = path.join(this.dir, this.reportFile);
		const existingReport = this.appendOnly ? tryRead(reportPath) : null;
		const reportBuffer = existingReport ?
			appendReportEntries(existingReport, this.reportEntries) :
			generateReport(this.relFile, this.snapFile, this.reportEntries);

		makeDir.sync(this.dir);
		const tmpSnapPath = writeFileAtomic.sync(snapPath, buffer);
		const tmpReportPath = writeFileAtomic.sync(reportPath, reportBuffer);

		return [tmpSnapPath, tmpReportPath, snapPath, reportPath];
	}
}

function determineSnapshotDir(projectDir, testDir) {
	const parts = new Set(path.relative(projectDir, testDir).split(path.sep));
	if (parts.has('__tests__')) {
		return path.join(testDir, '__snapshots__');
	} else if (parts.has('test') || parts.has('tests')) { // Accept tests, even though it's not in the default test patterns
		return path.join(testDir, 'snapshots');
	}

	return testDir;
}

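// Resolution examples (assuming projectDir is '/proj'):
//
//   testDir '/proj/__tests__/unit'  ->  '/proj/__tests__/unit/__snapshots__'
//   testDir '/proj/test'            ->  '/proj/test/snapshots'
//   testDir '/proj/lib'             ->  '/proj/lib'
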
function load(options) {
	const dir = determineSnapshotDir(options.projectDir, options.testDir);
	const reportFile = `${options.name}.md`;
	const snapFile = `${options.name}.snap`;
	const snapPath = path.join(dir, snapFile);

	// Existing snapshots are only decoded (and then appended to) when not
	// updating; when updating, the files are rewritten from scratch.
	let appendOnly = !options.updating;
	let snapshotsByHash;

	if (!options.updating) {
		const buffer = tryRead(snapPath);
		if (buffer) {
			snapshotsByHash = decodeSnapshots(buffer, snapPath);
		} else {
			appendOnly = false;
		}
	}

	return new Manager({
		appendOnly,
		dir,
		relFile: options.relFile,
		reportFile,
		snapFile,
		snapPath,
		snapshotsByHash: snapshotsByHash || new Map()
	});
}
exports.load = load;
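
// Typical usage (illustrative; option values are assumptions):
//
//   const manager = load({
//   	projectDir, testDir, name: 'foo.js', relFile: 'test/foo.js', updating: false
//   });
//   const result = manager.compare({belongsTo: 'my test', index: 0, label: '', expected: {answer: 42}});
//   if (!result.pass) {
//   	// diff result.actual against result.expected
//   }
//   manager.save(); // => [tmpSnapPath, tmpReportPath, snapPath, reportPath], or null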