Upgrade Ava to v4

This commit is contained in:
Henry Mercer 2022-02-01 18:01:11 +00:00
parent 9a40cc5274
commit ce89f1b611
1153 changed files with 27264 additions and 95308 deletions

View file

@ -1,24 +1,24 @@
import {Buffer} from 'node:buffer';
import crypto from 'node:crypto';
import fs from 'node:fs';
import {findSourceMap} from 'node:module';
import path from 'node:path';
import {fileURLToPath} from 'node:url';
import zlib from 'node:zlib';

import cbor from 'cbor';
import concordance from 'concordance';
import indentString from 'indent-string';
import mem from 'mem';
import slash from 'slash';
import writeFileAtomic from 'write-file-atomic';

import {snapshotManager as concordanceOptions} from './concordance-options.js';
// Increment if encoding layout or Concordance serialization versions change. Previous AVA versions will not be able to
// decode buffers generated by a newer version, so changing this value will require a major version bump of AVA itself.
// The version is encoded as an unsigned 16 bit integer.
const VERSION = 3;

const VERSION_HEADER = Buffer.alloc(2);
VERSION_HEADER.writeUInt16LE(VERSION);

// Human-readable first line of the snapshot file; everything after it is binary.
const READABLE_PREFIX = Buffer.from(`AVA Snapshot v${VERSION}\n`, 'ascii');
const REPORT_SEPARATOR = Buffer.from('\n\n', 'ascii');
const REPORT_TRAILING_NEWLINE = Buffer.from('\n', 'ascii');

// Snapshot integrity is verified with a SHA-256 digest (32 bytes).
const SHA_256_HASH_LENGTH = 32;
class SnapshotError extends Error {
/**
 * Base class for all snapshot read/decode failures.
 * Carries the path of the snapshot file that triggered the error.
 */
export class SnapshotError extends Error {
	/**
	 * @param {string} message - Human-readable failure description.
	 * @param {string} snapPath - Path of the snapshot file involved.
	 */
	constructor(message, snapPath) {
		super(message);
		this.snapPath = snapPath;
		this.name = 'SnapshotError';
	}
}
exports.SnapshotError = SnapshotError;
class ChecksumError extends SnapshotError {
/**
 * Thrown when the snapshot file's embedded digest does not match the digest
 * recomputed from its compressed contents (i.e. the file is corrupt).
 */
export class ChecksumError extends SnapshotError {
	/** @param {string} snapPath - Path of the corrupt snapshot file. */
	constructor(snapPath) {
		super('Checksum mismatch', snapPath);
		this.name = 'ChecksumError';
	}
}
exports.ChecksumError = ChecksumError;
/**
 * Thrown when a snapshot file was written by a different encoding version
 * than the one this AVA build expects.
 */
export class VersionMismatchError extends SnapshotError {
	/**
	 * @param {string} snapPath - Path of the snapshot file involved.
	 * @param {number} version - The version found in the file header.
	 */
	constructor(snapPath, version) {
		super('Unexpected snapshot version', snapPath);
		this.name = 'VersionMismatchError';
		// NOTE(review): this assignment was elided by the diff hunk boundary;
		// reconstructed from upstream AVA v4 — confirm the field name.
		this.snapshotVersion = version;
		this.expectedVersion = VERSION;
	}
}
/**
 * Thrown when a snapshot file cannot be decoded at all — e.g. the readable
 * header line is missing or the payload is not a valid snapshot encoding.
 */
export class InvalidSnapshotError extends SnapshotError {
	/** @param {string} snapPath - Path of the unreadable snapshot file. */
	constructor(snapPath) {
		super('Invalid snapshot file', snapPath);
		this.name = 'InvalidSnapshotError';
	}
}
// Jest-era snapshot files start with this fixed comment line.
const LEGACY_SNAPSHOT_HEADER = Buffer.from('// Jest Snapshot v1');

/**
 * Detects a legacy (Jest v1) snapshot file by comparing the buffer's leading
 * bytes against the known legacy header.
 * @param {Buffer} buffer - Raw snapshot file contents.
 * @returns {boolean} True when the buffer starts with the legacy header.
 */
function isLegacySnapshot(buffer) {
	const candidate = buffer.slice(0, LEGACY_SNAPSHOT_HEADER.byteLength);
	return LEGACY_SNAPSHOT_HEADER.equals(candidate);
}
class LegacyError extends SnapshotError {
/**
 * Thrown when a legacy (Jest v1) snapshot file is encountered; these cannot
 * be decoded and must be regenerated.
 */
export class LegacyError extends SnapshotError {
	/** @param {string} snapPath - Path of the legacy snapshot file. */
	constructor(snapPath) {
		super('Legacy snapshot file', snapPath);
		this.name = 'LegacyError';
	}
}
exports.LegacyError = LegacyError;
function tryRead(file) {
try {
@ -82,168 +85,117 @@ function tryRead(file) {
}
}
function withoutLineEndings(buffer) {
let checkPosition = buffer.byteLength - 1;
while (buffer[checkPosition] === 0x0A || buffer[checkPosition] === 0x0D) {
checkPosition--;
}
/**
 * Formats a single snapshot entry for the Markdown report: the label as a
 * blockquote, followed by the deserialized value indented as a code block.
 * @param {{data?: Buffer, label?: string}} snapshot - Serialized snapshot entry.
 * @param {number} index - Zero-based position within its block.
 * @returns {string} Markdown fragment for this entry.
 */
function formatEntry(snapshot, index) {
	const {
		data,
		label = `Snapshot ${index + 1}`, // Human-readable labels start counting at 1.
	} = snapshot;

	// Entries recorded via snapshot.skip() may have no data.
	const description = data
		? concordance.formatDescriptor(concordance.deserialize(data), concordanceOptions)
		: '<No Data>';
	const blockquote = label.split(/\n/).map(line => '> ' + line).join('\n');
	return `${blockquote}\n\n${indentString(description, 4)}`;
}
function formatEntry(label, descriptor) {
if (label) {
label = `> ${label}\n\n`;
}
/**
 * Renders all snapshot blocks into a single BufferBuilder: a `## title`
 * heading per block, with formatted entries separated by blank lines.
 * @param {{blocks: Array<{title: string, snapshots: Array}>}} snapshots
 * @returns {BufferBuilder} Builder holding the combined report body.
 */
function combineEntries({blocks}) {
	const combined = new BufferBuilder();

	for (const {title, snapshots} of blocks) {
		const last = snapshots[snapshots.length - 1];
		combined.write(`\n\n## ${title}\n\n`);

		for (const [index, snapshot] of snapshots.entries()) {
			combined.write(formatEntry(snapshot, index));
			// Separate entries, but do not add a trailing separator.
			if (snapshot !== last) {
				combined.write(REPORT_SEPARATOR);
			}
		}
	}

	return combined;
}
/**
 * Generates the full Markdown snapshot report for a test file.
 * @param {string} relFile - Test file path relative to the project directory.
 * @param {string} snapFile - File name of the binary snapshot file.
 * @param {{blocks: Array}} snapshots - Snapshot data to render.
 * @returns {Buffer} UTF-8 encoded report contents.
 */
function generateReport(relFile, snapFile, snapshots) {
	return new BufferBuilder()
		.write(`# Snapshot report for \`${slash(relFile)}\`

The actual snapshot is saved in \`${snapFile}\`.

Generated by [AVA](https://avajs.dev).`)
		.append(combineEntries(snapshots))
		.write(REPORT_TRAILING_NEWLINE)
		.toBuffer();
}
function appendReportEntries(existingReport, entries) {
const combined = combineEntries(entries);
const {buffers} = combined;
let {byteLength} = combined;
/**
 * Accumulates buffers (and strings, encoded as UTF-8) and concatenates them
 * in one pass via toBuffer(), tracking the total byte length as it goes.
 */
class BufferBuilder {
	constructor() {
		this.buffers = [];
		this.byteOffset = 0;
	}

	/**
	 * Appends all buffers from another builder.
	 * @param {BufferBuilder} builder
	 * @returns {this} For chaining.
	 */
	append(builder) {
		this.buffers.push(...builder.buffers);
		this.byteOffset += builder.byteOffset;
		return this;
	}

	/**
	 * Appends a chunk; strings are encoded as UTF-8 first.
	 * @param {Buffer|string} data
	 * @returns {this} For chaining.
	 */
	write(data) {
		if (typeof data === 'string') {
			this.write(Buffer.from(data, 'utf8'));
		} else {
			this.buffers.push(data);
			this.byteOffset += data.byteLength;
		}

		return this;
	}

	/** @returns {Buffer} All written chunks concatenated. */
	toBuffer() {
		return Buffer.concat(this.buffers, this.byteOffset);
	}
}
byteOffset += bodyOffset;
/**
 * Orders snapshot blocks by the task index recorded via Manager#touch().
 * Blocks whose title was never touched (no index) sort after touched ones;
 * two untouched blocks keep their relative order.
 * @param {Map<string, object>} blocksByTitle - Blocks keyed by title.
 * @param {Map<string, number>} blockIndices - Task index per touched title.
 * @returns {Array<[string, object]>} Sorted [title, block] entries.
 */
function sortBlocks(blocksByTitle, blockIndices) {
	return [...blocksByTitle].sort(
		([aTitle], [bTitle]) => {
			const a = blockIndices.get(aTitle);
			const b = blockIndices.get(bTitle);

			if (a === undefined) {
				if (b === undefined) {
					return 0;
				}

				return 1;
			}

			if (b === undefined) {
				return -1;
			}

			return a - b;
		},
	);
}
/**
 * Encodes snapshot data to the on-disk format: readable prefix line, version
 * header, SHA-256 digest, then gzipped canonical CBOR of the data.
 * @param {object} snapshotData - Blocks structure to persist.
 * @returns {Promise<Buffer>} Complete snapshot file contents.
 */
async function encodeSnapshots(snapshotData) {
	// Canonical encoding with omitted undefined properties keeps the output
	// deterministic, so unchanged data yields byte-identical files.
	const encoded = await cbor.encodeAsync(snapshotData, {
		omitUndefinedProperties: true,
		canonical: true,
	});
	const compressed = zlib.gzipSync(encoded);
	compressed[9] = 0x03; // Override the GZip header containing the OS to always be Linux
	const sha256sum = crypto.createHash('sha256').update(compressed).digest();
	return Buffer.concat([
		READABLE_PREFIX,
		VERSION_HEADER,
		sha256sum,
		compressed,
	], READABLE_PREFIX.byteLength + VERSION_HEADER.byteLength + SHA_256_HASH_LENGTH + compressed.byteLength);
}
/**
 * Decodes a snapshot file produced by encodeSnapshots().
 * @param {Buffer} buffer - Raw file contents.
 * @param {string} snapPath - File path, used in error reporting.
 * @returns {object} The decoded snapshot data.
 * @throws {LegacyError} For Jest v1 snapshot files.
 * @throws {InvalidSnapshotError} When the readable header line is missing.
 * @throws {VersionMismatchError} When the encoded version differs.
 * @throws {ChecksumError} When the SHA-256 digest does not match.
 */
function decodeSnapshots(buffer, snapPath) {
	// NOTE(review): this guard was elided by the diff hunk boundary;
	// reconstructed from upstream AVA v4 — confirm against the original.
	if (isLegacySnapshot(buffer)) {
		throw new LegacyError(snapPath);
	}

	// The version starts after the readable prefix, which is ended by a newline
	// byte (0x0A).
	const newline = buffer.indexOf(0x0A);
	if (newline === -1) {
		throw new InvalidSnapshotError(snapPath);
	}

	const versionOffset = newline + 1;
	const version = buffer.readUInt16LE(versionOffset);
	if (version !== VERSION) {
		throw new VersionMismatchError(snapPath, version);
	}

	const sha256sumOffset = versionOffset + 2;
	const compressedOffset = sha256sumOffset + SHA_256_HASH_LENGTH;
	const compressed = buffer.slice(compressedOffset);
	const sha256sum = crypto.createHash('sha256').update(compressed).digest();
	const expectedSum = buffer.slice(sha256sumOffset, compressedOffset);
	if (!sha256sum.equals(expectedSum)) {
		throw new ChecksumError(snapPath);
	}

	const decompressed = zlib.gunzipSync(compressed);
	return cbor.decode(decompressed);
}
/**
 * Manages snapshots for one test file: comparing, recording, skipping, and
 * saving blocks of snapshots keyed by test title.
 */
class Manager {
	constructor(options) {
		this.dir = options.dir;
		this.recordNewSnapshots = options.recordNewSnapshots;
		this.updating = options.updating;
		this.relFile = options.relFile;
		this.reportFile = options.reportFile;
		this.reportPath = options.reportPath;
		this.snapFile = options.snapFile;
		this.snapPath = options.snapPath;
		this.oldBlocksByTitle = options.oldBlocksByTitle;
		this.newBlocksByTitle = options.newBlocksByTitle;
		this.blockIndices = new Map();
		this.error = options.error;
		this.hasChanges = false;
	}

	/** Remembers a block title's task index for deterministic save ordering. */
	touch(title, taskIndex) {
		this.blockIndices.set(title, taskIndex);
	}

	/**
	 * Compares an expected value against the recorded snapshot; records a new
	 * snapshot when none exists and recording is enabled.
	 * @returns {{pass: boolean, actual?: object, expected?: object, record?: Function}}
	 * @throws Rethrows any decode error captured at load time.
	 */
	compare(options) {
		if (this.error) {
			throw this.error;
		}

		const block = this.newBlocksByTitle.get(options.belongsTo);
		const snapshot = block && block.snapshots[options.index];
		const data = snapshot && snapshot.data;

		if (!data) {
			if (!this.recordNewSnapshots) {
				return {pass: false};
			}

			if (options.deferRecording) {
				const record = this.deferRecord(options);
				return {pass: true, record};
			}

			this.record(options);
			return {pass: true};
		}

		const actual = concordance.deserialize(data, concordanceOptions);
		const expected = concordance.describe(options.expected, concordanceOptions);
		const pass = concordance.compareDescriptors(actual, expected);
		return {actual, expected, pass};
	}

	/**
	 * Stores an already-serialized snapshot at the given index of its block.
	 * @throws {RangeError} When the index skips ahead or already has data.
	 */
	recordSerialized({data, label, belongsTo, index}) {
		let block = this.newBlocksByTitle.get(belongsTo);
		if (!block) {
			block = {snapshots: []};
		}

		const {snapshots} = block;
		if (index > snapshots.length) {
			throw new RangeError(`Cannot record snapshot ${index} for ${JSON.stringify(belongsTo)}, exceeds expected index of ${snapshots.length}`);
		} else if (index < snapshots.length) {
			if (snapshots[index].data) {
				throw new RangeError(`Cannot record snapshot ${index} for ${JSON.stringify(belongsTo)}, already exists`);
			}

			snapshots[index] = {data, label};
		} else {
			snapshots.push({data, label});
		}

		this.newBlocksByTitle.set(belongsTo, block);
	}

	/**
	 * Serializes the expected value now, but defers storing it until the
	 * returned callback is invoked (in order).
	 * @returns {Function} Callback that performs the actual recording.
	 */
	deferRecord(options) {
		const {expected, belongsTo, label, index} = options;
		const descriptor = concordance.describe(expected, concordanceOptions);
		const data = concordance.serialize(descriptor);
		return () => { // Must be called in order!
			this.hasChanges = true;
			this.recordSerialized({data, label, belongsTo, index});
		};
	}

	/** Records a snapshot immediately. */
	record(options) {
		const record = this.deferRecord(options);
		record();
	}

	/** Carries an entire block over from the previous snapshot file. */
	skipBlock(title) {
		const block = this.oldBlocksByTitle.get(title);
		if (block) {
			this.newBlocksByTitle.set(title, block);
		}
	}

	/**
	 * Preserves a skipped snapshot (its old data and label, if any) so
	 * indices of later snapshots stay aligned.
	 */
	skipSnapshot({belongsTo, index, deferRecording}) {
		const oldBlock = this.oldBlocksByTitle.get(belongsTo);
		let snapshot = oldBlock && oldBlock.snapshots[index];
		if (!snapshot) {
			snapshot = {};
		}

		// Retain the label from the old snapshot, so as not to assume that the
		// snapshot.skip() arguments are well-formed.
		// Defer recording if called in a try().
		if (deferRecording) {
			return () => { // Must be called in order!
				this.recordSerialized({belongsTo, index, ...snapshot});
			};
		}

		this.recordSerialized({belongsTo, index, ...snapshot});
	}

	/**
	 * Writes the snapshot and report files atomically, or removes them when
	 * updating left no snapshots. Returns null when nothing changed.
	 * @returns {Promise<{changedFiles: string[], temporaryFiles: string[]}|null>}
	 */
	async save() {
		const {dir, relFile, snapFile, snapPath, reportPath} = this;
		if (this.updating && this.newBlocksByTitle.size === 0) {
			return {
				changedFiles: [cleanFile(snapPath), cleanFile(reportPath)].flat(),
				temporaryFiles: [],
			};
		}

		if (!this.hasChanges) {
			return null;
		}

		const snapshots = {
			blocks: sortBlocks(this.newBlocksByTitle, this.blockIndices).map(
				([title, block]) => ({title, ...block}),
			),
		};

		const buffer = await encodeSnapshots(snapshots);
		const reportBuffer = generateReport(relFile, snapFile, snapshots);

		await fs.promises.mkdir(dir, {recursive: true});

		const temporaryFiles = [];
		const tmpfileCreated = file => temporaryFiles.push(file);
		await Promise.all([
			writeFileAtomic(snapPath, buffer, {tmpfileCreated}),
			writeFileAtomic(reportPath, reportBuffer, {tmpfileCreated}),
		]);
		return {
			changedFiles: [snapPath, reportPath],
			temporaryFiles,
		};
	}
}
// Maps a (possibly compiled) test file back to its original source file using
// Node's built-in source map support; falls back to the file itself when no
// source map or sources are available. Memoized per file path.
const resolveSourceFile = mem(file => {
	const sourceMap = findSourceMap(file);
	if (sourceMap === undefined) {
		return file;
	}

	const {payload} = sourceMap;
	if (payload.sources.length === 0) { // Hypothetical?
		return file;
	}

	// Sources may be file:// URLs or plain paths.
	return payload.sources[0].startsWith('file://')
		? fileURLToPath(payload.sources[0])
		: payload.sources[0];
});
const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
export const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
const testDir = path.dirname(resolveSourceFile(file));
if (fixedLocation) {
const relativeTestLocation = path.relative(projectDir, testDir);
@ -447,8 +424,6 @@ const determineSnapshotDir = mem(({file, fixedLocation, projectDir}) => {
return testDir;
}, {cacheKey: ([{file}]) => file});
exports.determineSnapshotDir = determineSnapshotDir;
function determineSnapshotPaths({file, fixedLocation, projectDir}) {
const dir = determineSnapshotDir({file, fixedLocation, projectDir});
const relFile = path.relative(projectDir, resolveSourceFile(file));
@ -460,7 +435,9 @@ function determineSnapshotPaths({file, fixedLocation, projectDir}) {
dir,
relFile,
snapFile,
reportFile
reportFile,
snapPath: path.join(dir, snapFile),
reportPath: path.join(dir, reportFile),
};
}
@ -477,45 +454,52 @@ function cleanFile(file) {
}
}
// Remove snapshot and report if they exist. Returns an array containing the
// paths of the touched files.
function cleanSnapshots({file, fixedLocation, projectDir}) {
const {dir, snapFile, reportFile} = determineSnapshotPaths({file, fixedLocation, projectDir});
/**
 * Loads the snapshot manager for a test file. Reads and decodes the existing
 * snapshot file if present; decode errors are captured (and surfaced on the
 * first compare) unless snapshots are being updated, in which case they are
 * discarded and the file is rewritten from scratch.
 * @returns {Manager}
 */
export function load({file, fixedLocation, projectDir, recordNewSnapshots, updating}) {
	// Keep runner unit tests that use `new Runner()` happy
	if (file === undefined || projectDir === undefined) {
		return new Manager({
			recordNewSnapshots,
			updating,
			oldBlocksByTitle: new Map(),
			newBlocksByTitle: new Map(),
		});
	}

	const paths = determineSnapshotPaths({file, fixedLocation, projectDir});
	const buffer = tryRead(paths.snapPath);

	if (!buffer) {
		return new Manager({
			recordNewSnapshots,
			updating,
			...paths,
			oldBlocksByTitle: new Map(),
			newBlocksByTitle: new Map(),
		});
	}

	let blocksByTitle;
	let snapshotError;

	try {
		const data = decodeSnapshots(buffer, paths.snapPath);
		blocksByTitle = new Map(data.blocks.map(({title, ...block}) => [title, block]));
	} catch (error) {
		blocksByTitle = new Map();

		if (!updating) { // Discard all decoding errors when updating snapshots
			snapshotError = error instanceof SnapshotError
				? error
				: new InvalidSnapshotError(paths.snapPath);
		}
	}

	return new Manager({
		recordNewSnapshots,
		updating,
		...paths,
		oldBlocksByTitle: blocksByTitle,
		// When updating, start from an empty set so stale snapshots are dropped.
		newBlocksByTitle: updating ? new Map() : blocksByTitle,
		error: snapshotError,
	});
}