chore(merge): console status updates (#24559)

This commit is contained in:
Yury Semikhatsky 2023-08-02 14:49:06 -07:00 committed by GitHub
parent b0473b71cd
commit a867e738db
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 100 additions and 15 deletions

View file

@@ -189,8 +189,11 @@ async function mergeReports(reportDir: string | undefined, opts: { [key: string]
const configLoader = new ConfigLoader(); const configLoader = new ConfigLoader();
const config = await (configFile ? configLoader.loadConfigFile(configFile) : configLoader.loadEmptyConfig(process.cwd())); const config = await (configFile ? configLoader.loadConfigFile(configFile) : configLoader.loadEmptyConfig(process.cwd()));
const dir = path.resolve(process.cwd(), reportDir || ''); const dir = path.resolve(process.cwd(), reportDir || '');
if (!(await fs.promises.stat(dir)).isDirectory()) const dirStat = await fs.promises.stat(dir).catch(e => null);
if (!dirStat)
throw new Error('Directory does not exist: ' + dir); throw new Error('Directory does not exist: ' + dir);
if (!dirStat.isDirectory())
throw new Error(`"${dir}" is not a directory`);
let reporterDescriptions: ReporterDescription[] | undefined = resolveReporterOption(opts.reporter); let reporterDescriptions: ReporterDescription[] | undefined = resolveReporterOption(opts.reporter);
if (!reporterDescriptions && configFile) if (!reporterDescriptions && configFile)
reporterDescriptions = config.config.reporter; reporterDescriptions = config.config.reporter;

View file

@@ -25,19 +25,35 @@ import { createReporters } from '../runner/reporters';
import { Multiplexer } from './multiplexer'; import { Multiplexer } from './multiplexer';
import { ZipFile } from 'playwright-core/lib/utils'; import { ZipFile } from 'playwright-core/lib/utils';
import type { BlobReportMetadata } from './blob'; import type { BlobReportMetadata } from './blob';
import { relativeFilePath } from '../util';
type StatusCallback = (message: string) => void;
export async function createMergedReport(config: FullConfigInternal, dir: string, reporterDescriptions: ReporterDescription[]) { export async function createMergedReport(config: FullConfigInternal, dir: string, reporterDescriptions: ReporterDescription[]) {
const reporters = await createReporters(config, 'merge', reporterDescriptions);
const multiplexer = new Multiplexer(reporters);
const receiver = new TeleReporterReceiver(path.sep, multiplexer, false, config.config);
let printStatus: StatusCallback = () => {};
if (!multiplexer.printsToStdio()) {
printStatus = printStatusToStdout;
printStatus(`merging reports from ${dir}`);
}
const shardFiles = await sortedShardFiles(dir); const shardFiles = await sortedShardFiles(dir);
if (shardFiles.length === 0) if (shardFiles.length === 0)
throw new Error(`No report files found in ${dir}`); throw new Error(`No report files found in ${dir}`);
const events = await mergeEvents(dir, shardFiles); const events = await mergeEvents(dir, shardFiles, printStatus);
patchAttachmentPaths(events, dir); patchAttachmentPaths(events, dir);
const reporters = await createReporters(config, 'merge', reporterDescriptions); printStatus(`processing ${events.length} test events`);
const receiver = new TeleReporterReceiver(path.sep, new Multiplexer(reporters), false, config.config); for (const event of events) {
if (event.method === 'onEnd')
for (const event of events) printStatus(`building final report`);
await receiver.dispatch(event); await receiver.dispatch(event);
if (event.method === 'onEnd')
printStatus(`finished building report`);
}
} }
function patchAttachmentPaths(events: JsonEvent[], resourceDir: string) { function patchAttachmentPaths(events: JsonEvent[], resourceDir: string) {
@@ -57,11 +73,13 @@ function parseEvents(reportJsonl: Buffer): JsonEvent[] {
return reportJsonl.toString().split('\n').filter(line => line.length).map(line => JSON.parse(line)) as JsonEvent[]; return reportJsonl.toString().split('\n').filter(line => line.length).map(line => JSON.parse(line)) as JsonEvent[];
} }
async function extractAndParseReports(dir: string, shardFiles: string[]): Promise<{ metadata: BlobReportMetadata, parsedEvents: JsonEvent[] }[]> { async function extractAndParseReports(dir: string, shardFiles: string[], printStatus: StatusCallback): Promise<{ metadata: BlobReportMetadata, parsedEvents: JsonEvent[] }[]> {
const shardEvents = []; const shardEvents = [];
await fs.promises.mkdir(path.join(dir, 'resources'), { recursive: true }); await fs.promises.mkdir(path.join(dir, 'resources'), { recursive: true });
for (const file of shardFiles) { for (const file of shardFiles) {
const zipFile = new ZipFile(path.join(dir, file)); const absolutePath = path.join(dir, file);
printStatus(`extracting: ${relativeFilePath(absolutePath)}`);
const zipFile = new ZipFile(absolutePath);
const entryNames = await zipFile.entries(); const entryNames = await zipFile.entries();
for (const entryName of entryNames) { for (const entryName of entryNames) {
const content = await zipFile.read(entryName); const content = await zipFile.read(entryName);
@@ -87,18 +105,19 @@ function findMetadata(events: JsonEvent[], file: string): BlobReportMetadata {
return events[0].params; return events[0].params;
} }
async function mergeEvents(dir: string, shardReportFiles: string[]) { async function mergeEvents(dir: string, shardReportFiles: string[], printStatus: StatusCallback) {
const events: JsonEvent[] = []; const events: JsonEvent[] = [];
const configureEvents: JsonEvent[] = []; const configureEvents: JsonEvent[] = [];
const beginEvents: JsonEvent[] = []; const beginEvents: JsonEvent[] = [];
const endEvents: JsonEvent[] = []; const endEvents: JsonEvent[] = [];
const shardEvents = await extractAndParseReports(dir, shardReportFiles); const shardEvents = await extractAndParseReports(dir, shardReportFiles, printStatus);
shardEvents.sort((a, b) => { shardEvents.sort((a, b) => {
const shardA = a.metadata.shard?.current ?? 0; const shardA = a.metadata.shard?.current ?? 0;
const shardB = b.metadata.shard?.current ?? 0; const shardB = b.metadata.shard?.current ?? 0;
return shardA - shardB; return shardA - shardB;
}); });
const allTestIds = new Set<string>(); const allTestIds = new Set<string>();
printStatus(`merging events`);
for (const { parsedEvents } of shardEvents) { for (const { parsedEvents } of shardEvents) {
for (const event of parsedEvents) { for (const event of parsedEvents) {
if (event.method === 'onConfigure') if (event.method === 'onConfigure')
@@ -213,6 +232,10 @@ async function sortedShardFiles(dir: string) {
return files.filter(file => file.startsWith('report-') && file.endsWith('.zip')).sort(); return files.filter(file => file.startsWith('report-') && file.endsWith('.zip')).sort();
} }
function printStatusToStdout(message: string) {
process.stdout.write(`${message}\n`);
}
class ProjectNamePatcher { class ProjectNamePatcher {
private _testIds = new Set<string>(); private _testIds = new Set<string>();

View file

@@ -69,7 +69,7 @@ export async function createReporters(config: FullConfigInternal, mode: 'list' |
// Important to put it first, jsut in case some other reporter stalls onEnd. // Important to put it first, jsut in case some other reporter stalls onEnd.
if (mode === 'list') if (mode === 'list')
reporters.unshift(new ListModeReporter()); reporters.unshift(new ListModeReporter());
else else if (mode !== 'merge')
reporters.unshift(!process.env.CI ? new LineReporter({ omitFailures: true }) : new DotReporter()); reporters.unshift(!process.env.CI ? new LineReporter({ omitFailures: true }) : new DotReporter());
} }
return reporters; return reporters;

View file

@@ -412,6 +412,65 @@ test('merge into list report by default', async ({ runInlineTest, mergeReports }
]); ]);
}); });
test('should print progress', async ({ runInlineTest, mergeReports }) => {
const reportDir = test.info().outputPath('blob-report');
const files = {
'playwright.config.ts': `
module.exports = {
retries: 1,
reporter: [['blob', { outputDir: '${reportDir.replace(/\\/g, '/')}' }]]
};
`,
'a.test.js': `
import { test, expect } from '@playwright/test';
test('math 1', async ({}) => {
expect(1 + 1).toBe(2);
});
test('failing 1', async ({}) => {
expect(1).toBe(2);
});
test('flaky 1', async ({}) => {
expect(test.info().retry).toBe(1);
});
test.skip('skipped 1', async ({}) => {});
`,
'b.test.js': `
import { test, expect } from '@playwright/test';
test('math 2', async ({}) => {
expect(1 + 1).toBe(2);
});
test('failing 2', async ({}) => {
expect(1).toBe(2);
});
test.skip('skipped 2', async ({}) => {});
`,
'c.test.js': `
import { test, expect } from '@playwright/test';
test('math 3', async ({}) => {
expect(1 + 1).toBe(2);
});
test('flaky 2', async ({}) => {
expect(test.info().retry).toBe(1);
});
test.skip('skipped 3', async ({}) => {});
`
};
await runInlineTest(files, { shard: `1/2` }, { PWTEST_BLOB_DO_NOT_REMOVE: '1' });
await runInlineTest(files, { shard: `2/2` }, { PWTEST_BLOB_DO_NOT_REMOVE: '1' });
const reportFiles = await fs.promises.readdir(reportDir);
reportFiles.sort();
expect(reportFiles).toEqual([expect.stringMatching(/report-.*.zip/), expect.stringMatching(/report-.*.zip/)]);
const { exitCode, output } = await mergeReports(reportDir, { PW_TEST_HTML_REPORT_OPEN: 'never' }, { additionalArgs: ['--reporter', 'html'] });
expect(exitCode).toBe(0);
const lines = output.split('\n');
expect(lines).toContainEqual(expect.stringMatching(/extracting: blob-report[\/\\]report-.*zip$/));
expect(lines).toContainEqual(expect.stringMatching(/merging events$/));
expect(lines).toContainEqual(expect.stringMatching(/building final report/));
expect(lines).toContainEqual(expect.stringMatching(/finished building report/));
});
test('preserve attachments', async ({ runInlineTest, mergeReports, showReport, page }) => { test('preserve attachments', async ({ runInlineTest, mergeReports, showReport, page }) => {
const reportDir = test.info().outputPath('blob-report'); const reportDir = test.info().outputPath('blob-report');
const files = { const files = {

View file

@@ -344,7 +344,7 @@ for (const useIntermediateMergeReport of [false, true] as const) {
video: 'on', video: 'on',
trace: 'on', trace: 'on',
}, },
reporter: [['html', { attachmentsBaseURL: 'https://some-url.com/' }]] reporter: [['html', { attachmentsBaseURL: 'https://some-url.com/' }], ['line']]
}; };
`, `,
'a.test.js': ` 'a.test.js': `
@@ -1019,7 +1019,7 @@ for (const useIntermediateMergeReport of [false, true] as const) {
test('pass', ({}, testInfo) => { test('pass', ({}, testInfo) => {
}); });
` `
}, { 'reporter': 'html' }, { PW_TEST_HTML_REPORT_OPEN: 'never' }, { }, { 'reporter': 'html,line' }, { PW_TEST_HTML_REPORT_OPEN: 'never' }, {
cwd: 'foo/bar/baz/tests', cwd: 'foo/bar/baz/tests',
}); });
expect(result.exitCode).toBe(0); expect(result.exitCode).toBe(0);
@@ -1043,7 +1043,7 @@ for (const useIntermediateMergeReport of [false, true] as const) {
test('pass', ({}, testInfo) => { test('pass', ({}, testInfo) => {
}); });
` `
}, { 'reporter': 'html' }, { 'PW_TEST_HTML_REPORT_OPEN': 'never', 'PLAYWRIGHT_HTML_REPORT': '../my-report' }, { }, { 'reporter': 'html,line' }, { 'PW_TEST_HTML_REPORT_OPEN': 'never', 'PLAYWRIGHT_HTML_REPORT': '../my-report' }, {
cwd: 'foo/bar/baz/tests', cwd: 'foo/bar/baz/tests',
}); });
expect(result.exitCode).toBe(0); expect(result.exitCode).toBe(0);

View file

@@ -438,7 +438,7 @@ for (const useIntermediateMergeReport of [false, true] as const) {
test('pass', ({}, testInfo) => { test('pass', ({}, testInfo) => {
}); });
` `
}, { 'reporter': 'junit' }, { 'PLAYWRIGHT_JUNIT_OUTPUT_NAME': '../my-report.xml' }, { }, { 'reporter': 'junit,line' }, { 'PLAYWRIGHT_JUNIT_OUTPUT_NAME': '../my-report.xml' }, {
cwd: 'foo/bar/baz/tests', cwd: 'foo/bar/baz/tests',
}); });
expect(result.exitCode).toBe(0); expect(result.exitCode).toBe(0);