test_runner: use source maps when reporting coverage

PR-URL: https://github.com/nodejs/node/pull/52060
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Benjamin Gruenbaum <benjamingr@gmail.com>
Reviewed-By: Chemi Atlow <chemi@atlow.co.il>
Moshe Atlow 2024-03-15 08:33:42 +02:00 committed by GitHub
parent 6ad5353764
commit 814fa1ae74
11 changed files with 300 additions and 118 deletions
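
For orientation, the behaviour this commit enables can be exercised end to end with a sketch like the one below. It mirrors the flags used by the new `coverage with source maps` test at the bottom of this diff; the `./dist-tests` path is a hypothetical directory holding transpiled tests that ship `//# sourceMappingURL=` comments, like the fixtures added here.

```js
'use strict';
const { spawnSync } = require('node:child_process');

// Run the test runner with coverage enabled. The runner sets up
// NODE_V8_COVERAGE internally; no extra source-map flag is passed,
// matching the test added in this diff.
const result = spawnSync(process.execPath, [
  '--test',
  '--experimental-test-coverage',
  '--test-reporter', 'tap',
], { cwd: './dist-tests' }); // hypothetical fixture directory

// With this change, the coverage table reports the original sources
// (e.g. a.test.ts) rather than the generated bundles (a.test.mjs).
console.log(result.stdout.toString());
```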

@ -73,6 +73,7 @@ const {
ArrayPrototypeSort,
ObjectPrototypeHasOwnProperty,
StringPrototypeCharAt,
Symbol,
} = primordials;
const { validateObject } = require('internal/validators');
@ -83,6 +84,8 @@ const VLQ_BASE_SHIFT = 5;
const VLQ_BASE_MASK = (1 << 5) - 1;
const VLQ_CONTINUATION_MASK = 1 << 5;
const kMappings = Symbol('kMappings');
class StringCharIterator {
/**
* @constructor
@ -153,6 +156,10 @@ class SourceMap {
return cloneSourceMapV3(this.#payload);
}
get [kMappings]() {
return this.#mappings;
}
/**
* @return {number[] | undefined} line lengths of generated source code
*/
@ -382,5 +389,6 @@ function compareSourceMapEntry(entry1, entry2) {
}
module.exports = {
kMappings,
SourceMap,
};
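
The `kMappings` export above is internal; it gives coverage.js direct access to the parsed mapping tuples so it can fall back to the very first entry when a range starts before any mappable code (an edge case handled later in this diff). Each tuple keeps the generated line/column at the front and the original source URL, line, and column at indices 2–4, which is exactly how coverage.js destructures it. As a rough public-API analogue, the sketch below performs the same lookup through `node:module`'s `SourceMap`; the map file path is an assumption.

```js
'use strict';
// Public-API analogue of what coverage.js does with the internal kMappings
// symbol: resolve a generated-code position back to its original source.
// The path is hypothetical; any of the fixture maps below would do.
const { SourceMap } = require('node:module');
const { readFileSync } = require('node:fs');

const payload = JSON.parse(readFileSync('./a.test.mjs.map', 'utf8'));
const sourceMap = new SourceMap(payload);

// findEntry() takes zero-indexed generated line/column and returns named
// fields; the internal mappings store the same data positionally as
// [generatedLine, generatedColumn, originalSource, originalLine, originalColumn].
const { originalSource, originalLine, originalColumn } = sourceMap.findEntry(0, 40);
console.log(originalSource, originalLine + 1, originalColumn);
```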

@ -6,6 +6,7 @@ const {
JSONParse,
MathFloor,
NumberParseInt,
ObjectAssign,
RegExpPrototypeExec,
RegExpPrototypeSymbolSplit,
SafeMap,
@ -14,6 +15,7 @@ const {
StringPrototypeLocaleCompare,
StringPrototypeStartsWith,
MathMax,
MathMin,
} = primordials;
const {
copyFileSync,
@ -26,6 +28,7 @@ const { setupCoverageHooks } = require('internal/util');
const { tmpdir } = require('os');
const { join, resolve } = require('path');
const { fileURLToPath } = require('internal/url');
const { kMappings, SourceMap } = require('internal/source_map/source_map');
const kCoverageFileRegex = /^coverage-(\d+)-(\d{13})-(\d+)\.json$/;
const kIgnoreRegex = /\/\* node:coverage ignore next (?<count>\d+ )?\*\//;
const kLineEndingRegex = /\r?\n$/u;
@ -33,8 +36,6 @@ const kLineSplitRegex = /(?<=\r?\n)/u;
const kStatusRegex = /\/\* node:coverage (?<status>enable|disable) \*\//;
class CoverageLine {
#covered;
constructor(line, src, startOffset) {
const newlineLength =
RegExpPrototypeExec(kLineEndingRegex, src)?.[0].length ?? 0;
@ -44,21 +45,7 @@ class CoverageLine {
this.startOffset = startOffset;
this.endOffset = startOffset + src.length - newlineLength;
this.ignore = false;
this.count = 0;
this.#covered = true;
}
get covered() {
return this.#covered;
}
set covered(isCovered) {
// V8 can generate multiple ranges that span the same line.
if (!this.#covered) {
return;
}
this.#covered = isCovered;
this.count = this.startOffset === this.endOffset ? 1 : 0;
}
}
@ -69,9 +56,74 @@ class TestCoverage {
this.workingDirectory = workingDirectory;
}
#sourceLines = new SafeMap();
getLines(fileUrl, source) {
// Split the file source into lines. Make sure the lines maintain their
// original line endings because those characters are necessary for
// determining offsets in the file.
if (this.#sourceLines.has(fileUrl)) {
return this.#sourceLines.get(fileUrl);
}
try {
source ??= readFileSync(fileURLToPath(fileUrl), 'utf8');
} catch {
// The file can no longer be read. It may have been deleted among
// other possibilities. Leave it out of the coverage report.
this.#sourceLines.set(fileUrl, null);
return;
}
const linesWithBreaks =
RegExpPrototypeSymbolSplit(kLineSplitRegex, source);
let ignoreCount = 0;
let enabled = true;
let offset = 0;
const lines = ArrayPrototypeMap(linesWithBreaks, (line, i) => {
const startOffset = offset;
const coverageLine = new CoverageLine(i + 1, line, startOffset);
offset += line.length;
// Determine if this line is being ignored.
if (ignoreCount > 0) {
ignoreCount--;
coverageLine.ignore = true;
} else if (!enabled) {
coverageLine.ignore = true;
}
if (!coverageLine.ignore) {
// If this line is not already being ignored, check for ignore
// comments.
const match = RegExpPrototypeExec(kIgnoreRegex, line);
if (match !== null) {
ignoreCount = NumberParseInt(match.groups?.count ?? 1, 10);
}
}
// Check for comments to enable/disable coverage no matter what. These
// take precedence over ignore comments.
const match = RegExpPrototypeExec(kStatusRegex, line);
const status = match?.groups?.status;
if (status) {
ignoreCount = 0;
enabled = status === 'enable';
}
return coverageLine;
});
this.#sourceLines.set(fileUrl, lines);
return lines;
}
summary() {
internalBinding('profiler').takeCoverage();
const coverage = getCoverageFromDirectory(this.coverageDirectory);
const coverage = this.getCoverageFromDirectory();
const coverageSummary = {
__proto__: null,
workingDirectory: this.workingDirectory,
@ -97,25 +149,6 @@ class TestCoverage {
for (let i = 0; i < coverage.length; ++i) {
const { functions, url } = coverage[i];
// Split the file source into lines. Make sure the lines maintain their
// original line endings because those characters are necessary for
// determining offsets in the file.
const filePath = fileURLToPath(url);
let source;
try {
source = readFileSync(filePath, 'utf8');
} catch {
// The file can no longer be read. It may have been deleted among
// other possibilities. Leave it out of the coverage report.
continue;
}
const linesWithBreaks =
RegExpPrototypeSymbolSplit(kLineSplitRegex, source);
let ignoreCount = 0;
let enabled = true;
let offset = 0;
let totalBranches = 0;
let totalFunctions = 0;
let branchesCovered = 0;
@ -123,42 +156,11 @@ class TestCoverage {
const functionReports = [];
const branchReports = [];
const lines = ArrayPrototypeMap(linesWithBreaks, (line, i) => {
const startOffset = offset;
const coverageLine = new CoverageLine(i + 1, line, startOffset);
const lines = this.getLines(url);
if (!lines) {
continue;
}
offset += line.length;
// Determine if this line is being ignored.
if (ignoreCount > 0) {
ignoreCount--;
coverageLine.ignore = true;
} else if (!enabled) {
coverageLine.ignore = true;
}
if (!coverageLine.ignore) {
// If this line is not already being ignored, check for ignore
// comments.
const match = RegExpPrototypeExec(kIgnoreRegex, line);
if (match !== null) {
ignoreCount = NumberParseInt(match.groups?.count ?? 1, 10);
}
}
// Check for comments to enable/disable coverage no matter what. These
// take precedence over ignore comments.
const match = RegExpPrototypeExec(kStatusRegex, line);
const status = match?.groups?.status;
if (status) {
ignoreCount = 0;
enabled = status === 'enable';
}
return coverageLine;
});
for (let j = 0; j < functions.length; ++j) {
const { isBlockCoverage, ranges } = functions[j];
@ -168,7 +170,9 @@ class TestCoverage {
const range = ranges[k];
maxCountPerFunction = MathMax(maxCountPerFunction, range.count);
mapRangeToLines(range, lines);
// Add some useful data to the range. The test runner has read these ranges
// from a file, so we own the data structures and can do what we want.
ObjectAssign(range, mapRangeToLines(range, lines));
if (isBlockCoverage) {
ArrayPrototypePush(branchReports, {
@ -216,14 +220,14 @@ class TestCoverage {
count: line.count,
});
}
if (line.covered || line.ignore) {
if (line.count > 0 || line.ignore) {
coveredCnt++;
}
}
ArrayPrototypePush(coverageSummary.files, {
__proto__: null,
path: filePath,
path: fileURLToPath(url),
totalLineCount: lines.length,
totalBranchCount: totalBranches,
totalFunctionCount: totalFunctions,
@ -289,6 +293,115 @@ class TestCoverage {
}
}
}
getCoverageFromDirectory() {
const result = new SafeMap();
let dir;
try {
dir = opendirSync(this.coverageDirectory);
for (let entry; (entry = dir.readSync()) !== null;) {
if (RegExpPrototypeExec(kCoverageFileRegex, entry.name) === null) {
continue;
}
const coverageFile = join(this.coverageDirectory, entry.name);
const coverage = JSONParse(readFileSync(coverageFile, 'utf8'));
mergeCoverage(result, this.mapCoverageWithSourceMap(coverage));
}
return ArrayFrom(result.values());
} finally {
if (dir) {
dir.closeSync();
}
}
}
mapCoverageWithSourceMap(coverage) {
const { result } = coverage;
const sourceMapCache = coverage['source-map-cache'];
if (!sourceMapCache) {
return result;
}
const newResult = new SafeMap();
for (let i = 0; i < result.length; ++i) {
const script = result[i];
const { url, functions } = script;
if (shouldSkipFileCoverage(url) || sourceMapCache[url] == null) {
newResult.set(url, script);
continue;
}
const originalLines = this.getLines(url);
const { data, lineLengths } = sourceMapCache[url];
if (data.sourcesContent != null) {
for (let j = 0; j < data.sources.length; ++j) {
this.getLines(data.sources[j], data.sourcesContent[j]);
}
}
const sourceMap = new SourceMap(data, { __proto__: null, lineLengths });
for (let j = 0; j < functions.length; ++j) {
const { ranges, functionName, isBlockCoverage } = functions[j];
if (ranges == null) {
continue;
}
let newUrl;
const newRanges = [];
for (let k = 0; k < ranges.length; ++k) {
const { startOffset, endOffset, count } = ranges[k];
const { lines } = mapRangeToLines(ranges[k], originalLines);
let startEntry = sourceMap
.findEntry(lines[0].line - 1, MathMax(0, startOffset - lines[0].startOffset));
const endEntry = sourceMap
.findEntry(lines[lines.length - 1].line - 1, (endOffset - lines[lines.length - 1].startOffset) - 1);
if (!startEntry.originalSource && endEntry.originalSource &&
lines[0].line === 1 && startOffset === 0 && lines[0].startOffset === 0) {
// Edge case when the first line is not mappable
const { 2: originalSource, 3: originalLine, 4: originalColumn } = sourceMap[kMappings][0];
startEntry = { __proto__: null, originalSource, originalLine, originalColumn };
}
if (!startEntry.originalSource || startEntry.originalSource !== endEntry.originalSource) {
// The range is not mappable. Skip it.
continue;
}
newUrl ??= startEntry?.originalSource;
const mappedLines = this.getLines(newUrl);
const mappedStartOffset = this.entryToOffset(startEntry, mappedLines);
const mappedEndOffset = this.entryToOffset(endEntry, mappedLines) + 1;
for (let l = startEntry.originalLine; l <= endEntry.originalLine; l++) {
mappedLines[l].count = count;
}
ArrayPrototypePush(newRanges, {
__proto__: null, startOffset: mappedStartOffset, endOffset: mappedEndOffset, count,
});
}
if (!newUrl) {
// No mappable ranges. Skip the function.
continue;
}
const newScript = newResult.get(newUrl) ?? { __proto__: null, url: newUrl, functions: [] };
ArrayPrototypePush(newScript.functions, { __proto__: null, functionName, ranges: newRanges, isBlockCoverage });
newResult.set(newUrl, newScript);
}
}
return ArrayFrom(newResult.values());
}
entryToOffset(entry, lines) {
const line = MathMax(entry.originalLine, 0);
return MathMin(lines[line].startOffset + entry.originalColumn, lines[line].endOffset);
}
}
function toPercentage(covered, total) {
@ -342,13 +455,7 @@ function mapRangeToLines(range, lines) {
while (endOffset > line?.startOffset) {
// If the range is not covered, and the range covers the entire line,
// then mark that line as not covered.
if (count === 0 && startOffset <= line.startOffset &&
endOffset >= line.endOffset) {
line.covered = false;
line.count = 0;
}
if (count > 0 && startOffset <= line.startOffset &&
endOffset >= line.endOffset) {
if (startOffset <= line.startOffset && endOffset >= line.endOffset) {
line.count = count;
}
@ -370,36 +477,17 @@ function mapRangeToLines(range, lines) {
}
}
// Add some useful data to the range. The test runner has read these ranges
// from a file, so we own the data structures and can do what we want.
range.lines = mappedLines;
range.ignoredLines = ignoredLines;
return { __proto__: null, lines: mappedLines, ignoredLines };
}
function getCoverageFromDirectory(coverageDirectory) {
const result = new SafeMap();
let dir;
try {
dir = opendirSync(coverageDirectory);
for (let entry; (entry = dir.readSync()) !== null;) {
if (RegExpPrototypeExec(kCoverageFileRegex, entry.name) === null) {
continue;
}
const coverageFile = join(coverageDirectory, entry.name);
const coverage = JSONParse(readFileSync(coverageFile, 'utf8'));
mergeCoverage(result, coverage.result);
}
return ArrayFrom(result.values());
} finally {
if (dir) {
dir.closeSync();
}
}
function shouldSkipFileCoverage(url) {
// The first part of this check filters out the node_modules/ directory
// from the results. This filter is applied first because most real world
// applications will be dominated by third party dependencies. The second
// part of the check filters out core modules, which start with 'node:' in
// coverage reports, as well as any invalid coverages which have been
// observed on Windows.
return StringPrototypeIncludes(url, '/node_modules/') || !StringPrototypeStartsWith(url, 'file:');
}
function mergeCoverage(merged, coverage) {
@ -407,14 +495,7 @@ function mergeCoverage(merged, coverage) {
const newScript = coverage[i];
const { url } = newScript;
// The first part of this check filters out the node_modules/ directory
// from the results. This filter is applied first because most real world
// applications will be dominated by third party dependencies. The second
// part of the check filters out core modules, which start with 'node:' in
// coverage reports, as well as any invalid coverages which have been
// observed on Windows.
if (StringPrototypeIncludes(url, '/node_modules/') ||
!StringPrototypeStartsWith(url, 'file:')) {
if (shouldSkipFileCoverage(url)) {
continue;
}
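
The core of `mapCoverageWithSourceMap()` above is offset arithmetic: take a V8 range expressed as byte offsets into the generated file, find the generated lines it spans, ask the source map for the original line/column of both ends, and clamp those back into offsets within the original source (which is what `entryToOffset()` does). The standalone sketch below restates that translation outside the internals; the `CoverageLine`-like objects and the `node:module` `SourceMap` stand in for the internal structures, so treat it as illustrative rather than the implementation itself.

```js
'use strict';
// Illustrative restatement of the offset translation above. `rangeLines` are
// the lines of the *generated* file that the V8 range spans, and
// `sourceLines` are the lines of the mapped original file; both use the
// CoverageLine shape ({ line, startOffset, endOffset }) built by getLines().

// Clamp an { originalLine, originalColumn } entry (zero-indexed, as returned
// by findEntry()) to an offset inside the original source, like
// TestCoverage#entryToOffset().
function entryToOffset(entry, sourceLines) {
  const line = Math.max(entry.originalLine, 0);
  return Math.min(sourceLines[line].startOffset + entry.originalColumn,
                  sourceLines[line].endOffset);
}

// Translate one generated-code range into an original-source range, or
// return null when the source map cannot place both ends in the same file.
function mapRange(range, rangeLines, sourceLines, sourceMap) {
  const first = rangeLines[0];
  const last = rangeLines[rangeLines.length - 1];
  const start = sourceMap.findEntry(
    first.line - 1, Math.max(0, range.startOffset - first.startOffset));
  const end = sourceMap.findEntry(
    last.line - 1, (range.endOffset - last.startOffset) - 1);
  if (!start.originalSource || start.originalSource !== end.originalSource) {
    return null;
  }
  return {
    startOffset: entryToOffset(start, sourceLines),
    endOffset: entryToOffset(end, sourceLines) + 1,
    count: range.count,
  };
}
```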

@ -0,0 +1,13 @@
The files in this directory are generated by the following commands:
```sh
npx esbuild a.test.ts --sourcemap --outdir=. --out-extension:.js=.mjs --sources-content=false --minify --bundle --platform=node --format=esm
echo "import { test } from 'node:test';
test('ok', () => {});
function uncovered() {
return 'uncovered';
}
" | npx esbuild --sourcemap --sourcefile=stdin.test.ts --sources-content=true --bundle --platform=node --outfile="stdin.test.js"
```

@ -0,0 +1,2 @@
import{test as o}from"node:test";import{strictEqual as r}from"node:assert";function e(){r(1,2)}o("fails",()=>{e()});
//# sourceMappingURL=a.test.mjs.map

@ -0,0 +1,6 @@
{
"version": 3,
"sources": ["a.test.ts", "b.test.ts"],
"mappings": "AAAA,OAAS,QAAAA,MAAY,YCArB,OAAS,eAAAC,MAAmB,cAErB,SAASC,GAAU,CACxBD,EAAY,EAAG,CAAC,CAClB,CDDAE,EAAK,QAAS,IAAM,CAClBC,EAAQ,CACV,CAAC",
"names": ["test", "strictEqual", "covered", "test", "covered"]
}

@ -0,0 +1,13 @@
import { test } from 'node:test';
import { covered } from './b.test';
test('fails', () => {
covered();
});
function uncovered() {
return 'uncovered';
}
if (false) {
uncovered();
}

@ -0,0 +1,9 @@
import { strictEqual } from 'node:assert';
export function covered() {
strictEqual(1, 2);
}
export function uncovered() {
return 'uncovered';
}

@ -0,0 +1,7 @@
'use strict';
const test = require('node:test');
test('no source map', () => {});
if (false) {
console.log('this does not execute');
}

@ -0,0 +1,5 @@
// stdin.test.ts
var import_node_test = require("node:test");
(0, import_node_test.test)("ok", () => {
});
//# sourceMappingURL=stdin.test.js.map

@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["stdin.test.ts"],
"sourcesContent": ["import { test } from 'node:test';\ntest('ok', () => {});\n\nfunction uncovered() {\n return 'uncovered';\n}\n\n"],
"mappings": ";AAAA,uBAAqB;AAAA,IACrB,uBAAK,MAAM,MAAM;AAAC,CAAC;",
"names": []
}

@ -242,3 +242,34 @@ test('coverage reports on lines, functions, and branches', skipIfNoInspector, as
});
});
});
test('coverage with source maps', skipIfNoInspector, () => {
let report = [
'# start of coverage report',
'# --------------------------------------------------------------',
'# file | line % | branch % | funcs % | uncovered lines',
'# --------------------------------------------------------------',
'# a.test.ts | 53.85 | 100.00 | 100.00 | 8-13', // part of a bundle
'# b.test.ts | 55.56 | 100.00 | 100.00 | 1 7-9', // part of a bundle
'# index.test.js | 71.43 | 66.67 | 100.00 | 6-7', // no source map
'# stdin.test.ts | 57.14 | 100.00 | 100.00 | 4-6', // Source map without original file
'# --------------------------------------------------------------',
'# all files | 58.33 | 87.50 | 100.00 |',
'# --------------------------------------------------------------',
'# end of coverage report',
].join('\n');
if (common.isWindows) {
report = report.replaceAll('/', '\\');
}
const fixture = fixtures.path('test-runner', 'coverage');
const args = [
'--test', '--experimental-test-coverage', '--test-reporter', 'tap',
];
const result = spawnSync(process.execPath, args, { cwd: fixture });
assert.strictEqual(result.stderr.toString(), '');
assert(result.stdout.toString().includes(report));
assert.strictEqual(result.status, 1);
});