parent 0dd090aeab
commit a03f3223c4
@@ -43,7 +43,7 @@ import {
 import { toMatchSnapshot, toHaveScreenshot } from './toMatchSnapshot';
 import type { Expect } from '../common/types';
 import { currentTestInfo, currentExpectTimeout } from '../common/globals';
-import { serializeError, captureStackTrace } from '../util';
+import { serializeError, captureStackTrace, trimLongString } from '../util';
 import {
   expect as expectLibrary,
   INVERTED_COLOR,
@@ -201,7 +201,7 @@ class ExpectMetaInfoProxyHandler {
     const step = testInfo._addStep({
       location: frame && frame.file ? { file: path.resolve(process.cwd(), frame.file), line: frame.line || 0, column: frame.column || 0 } : undefined,
       category: 'expect',
-      title: customMessage || defaultTitle,
+      title: trimLongString(customMessage || defaultTitle, 1024),
       canHaveChildren: true,
       forceNoParent: false
     });
@@ -545,3 +545,29 @@ test('should print timed out error message', async ({ runInlineTest }) => {
   const output = stripAnsi(result.output);
   expect(output).toContain('Timed out 1ms waiting for expect(received).toBeChecked()');
 });
+
+test('should not leak long expect message strings', async ({ runInlineTest }) => {
+  const result = await runInlineTest({
+    'a.test.ts': `
+      const { test } = pwt;
+
+      let logs: string = 'Ab';
+      const consoleLogWatcher = (msg: ConsoleMessage) => {
+        if (logs.length < (1<<28))
+          logs += logs;
+        expect(msg.text(), logs).toMatch(/^\\d+$/);
+      }
+
+      test('main', async ({ page }) => {
+        page.on('console', consoleLogWatcher);
+        await page.evaluate(() => {
+          for (let i = 0; i < 20; i++)
+            console.log(i);
+        });
+      });
+    `,
+  }, { workers: 1 });
+  // expect(result.output).toBe('');
+  expect(result.failed).toBe(0);
+  expect(result.exitCode).toBe(0);
+});
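Note on the change: the second hunk caps a custom expect message at 1024 characters before it becomes a step title. In the new test, consoleLogWatcher doubles the logs string on every console message (up to a 2^28 character cap) and passes it as the custom message to expect, so without trimming each expect step would retain a very large title string. The trimLongString helper is imported from '../util' and its body is not part of this diff; the following is only a minimal sketch of such a truncation helper, assuming it caps the string at the given length and marks the cut, and may differ from the actual implementation:

// Sketch only: the real '../util' helper is not shown in this diff and may
// use a different truncation marker (for example, a hash of the full string).
function trimLongString(s: string, length: number = 100): string {
  if (s.length <= length)
    return s;
  // Keep the first (length - 1) characters and mark the truncation.
  return s.substring(0, length - 1) + '\u2026';
}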