feat(test runner): run tests with different worker fixtures in a single worker
Previously, we forced a new worker for a new set of worker fixtures. Now, we can reuse the worker by tearing down mismatching worker fixtures between tests. The new behavior allows all tests from a single file to run in order, even when mixing various test types with different worker fixtures. This is less efficient, but more predictable for the user. Switching between worker fixtures is reported under the "Reset Fixtures" step inside "Before Hooks" and has a separate timeout. We still force a new worker for each project and repeatEach index.
This commit is contained in:
parent
014577d345
commit
621897c385
|
|
@ -116,6 +116,7 @@ export type DonePayload = {
|
|||
fatalErrors: TestInfoError[];
|
||||
skipTestsDueToSetupFailure: string[]; // test ids
|
||||
fatalUnknownTestIds?: string[];
|
||||
lastTestPoolDigest?: string;
|
||||
};
|
||||
|
||||
export type TestOutputPayload = {
|
||||
|
|
|
|||
|
|
@ -77,10 +77,7 @@ export function bindFileSuiteToProject(project: FullProjectInternal, suite: Suit
|
|||
// Skip annotations imply skipped expectedStatus.
|
||||
if (test.annotations.some(a => a.type === 'skip' || a.type === 'fixme'))
|
||||
test.expectedStatus = 'skipped';
|
||||
|
||||
// We only compute / set digest in the runner.
|
||||
if (test._poolDigest)
|
||||
test._workerHash = `${project.id}-${test._poolDigest}-0`;
|
||||
test._workerHash = `${project.id}-0`;
|
||||
});
|
||||
|
||||
return result;
|
||||
|
|
@ -95,9 +92,7 @@ export function applyRepeatEachIndex(project: FullProjectInternal, fileSuite: Su
|
|||
const testId = suite._fileId + '-' + calculateSha1(testIdExpression).slice(0, 20);
|
||||
test.id = testId;
|
||||
test.repeatEachIndex = repeatEachIndex;
|
||||
|
||||
if (test._poolDigest)
|
||||
test._workerHash = `${project.id}-${test._poolDigest}-${repeatEachIndex}`;
|
||||
test._workerHash = `${project.id}-${repeatEachIndex}`;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
|
|
|||
|
|
@ -56,8 +56,10 @@ export class Dispatcher {
|
|||
return;
|
||||
const job = this._queue[0];
|
||||
|
||||
// 2. Find a worker with the same hash, or just some free worker.
|
||||
let index = this._workerSlots.findIndex(w => !w.busy && w.worker && w.worker.hash() === job.workerHash && !w.worker.didSendStop());
|
||||
// 2. Find a worker with the same hash, preferably with the same digest, or just some free worker.
|
||||
let index = this._workerSlots.findIndex(w => !w.busy && w.worker && !w.worker.didSendStop() && w.worker.hash() === job.workerHash && w.worker.poolDigest === job.firstPoolDigest);
|
||||
if (index === -1)
|
||||
index = this._workerSlots.findIndex(w => !w.busy && w.worker && !w.worker.didSendStop() && w.worker.hash() === job.workerHash);
|
||||
if (index === -1)
|
||||
index = this._workerSlots.findIndex(w => !w.busy);
|
||||
// No workers available, bail out.
|
||||
|
|
@ -113,6 +115,7 @@ export class Dispatcher {
|
|||
const result = await jobDispatcher.jobResult;
|
||||
this._workerSlots[index].jobDispatcher = undefined;
|
||||
this._updateCounterForWorkerHash(job.workerHash, -1);
|
||||
worker.poolDigest = result.lastTestPoolDigest || worker.poolDigest;
|
||||
|
||||
// 4. When worker encounters error, we stop it and create a new one.
|
||||
// We also do not keep the worker alive if it cannot serve any more jobs.
|
||||
|
|
@ -231,7 +234,7 @@ export class Dispatcher {
|
|||
}
|
||||
|
||||
class JobDispatcher {
|
||||
jobResult = new ManualPromise<{ newJob?: TestGroup, didFail: boolean }>();
|
||||
jobResult = new ManualPromise<{ newJob?: TestGroup, didFail: boolean, lastTestPoolDigest?: string }>();
|
||||
|
||||
private _listeners: RegisteredListener[] = [];
|
||||
private _failedTests = new Set<TestCase>();
|
||||
|
|
@ -402,7 +405,7 @@ class JobDispatcher {
|
|||
// - we are here not because something failed
|
||||
// - no unrecoverable worker error
|
||||
if (!this._remainingByTestId.size && !this._failedTests.size && !params.fatalErrors.length && !params.skipTestsDueToSetupFailure.length && !params.fatalUnknownTestIds && !params.unexpectedExitError) {
|
||||
this._finished({ didFail: false });
|
||||
this._finished({ didFail: false, lastTestPoolDigest: params.lastTestPoolDigest });
|
||||
return;
|
||||
}
|
||||
|
||||
|
|
@ -476,7 +479,7 @@ class JobDispatcher {
|
|||
|
||||
// This job is over, we will schedule another one.
|
||||
const newJob = remaining.length ? { ...this._job, tests: remaining } : undefined;
|
||||
this._finished({ didFail: true, newJob });
|
||||
this._finished({ didFail: true, newJob, lastTestPoolDigest: params.lastTestPoolDigest });
|
||||
}
|
||||
|
||||
onExit(data: ProcessExitData) {
|
||||
|
|
@ -486,7 +489,7 @@ class JobDispatcher {
|
|||
this._onDone({ skipTestsDueToSetupFailure: [], fatalErrors: [], unexpectedExitError });
|
||||
}
|
||||
|
||||
private _finished(result: { newJob?: TestGroup, didFail: boolean }) {
|
||||
private _finished(result: { newJob?: TestGroup, didFail: boolean, lastTestPoolDigest?: string }) {
|
||||
eventsHelper.removeEventListeners(this._listeners);
|
||||
this.jobResult.resolve(result);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,6 +18,7 @@ import type { Suite, TestCase } from '../common/test';
|
|||
|
||||
export type TestGroup = {
|
||||
workerHash: string;
|
||||
firstPoolDigest: string;
|
||||
requireFile: string;
|
||||
repeatEachIndex: number;
|
||||
projectId: string;
|
||||
|
|
@ -25,13 +26,10 @@ export type TestGroup = {
|
|||
};
|
||||
|
||||
export function createTestGroups(projectSuite: Suite, expectedParallelism: number): TestGroup[] {
|
||||
// This function groups tests that can be run together.
|
||||
// Tests cannot be run together when:
|
||||
// - They belong to different projects - requires different workers.
|
||||
// - They have a different repeatEachIndex - requires different workers.
|
||||
// - They have a different set of worker fixtures in the pool - requires different workers.
|
||||
// - They have a different requireFile - reuses the worker, but runs each requireFile separately.
|
||||
// - They belong to a parallel suite.
|
||||
// This function groups tests that must be run together:
|
||||
// - in default mode - all tests in a single requireFile;
|
||||
// - in serial mode - all test from the serial suite.
|
||||
// Otherwise, tests will be put into separate test groups, that may or may not run in the same worker.
|
||||
|
||||
// Using the map "workerHash -> requireFile -> group" makes us preserve the natural order
|
||||
// of worker hashes and require files for the simple cases.
|
||||
|
|
@ -54,6 +52,7 @@ export function createTestGroups(projectSuite: Suite, expectedParallelism: numbe
|
|||
const createGroup = (test: TestCase): TestGroup => {
|
||||
return {
|
||||
workerHash: test._workerHash,
|
||||
firstPoolDigest: test._poolDigest,
|
||||
requireFile: test._requireFile,
|
||||
repeatEachIndex: test.repeatEachIndex,
|
||||
projectId: test._projectId,
|
||||
|
|
|
|||
|
|
@ -29,6 +29,7 @@ export class WorkerHost extends ProcessHost {
|
|||
readonly parallelIndex: number;
|
||||
readonly workerIndex: number;
|
||||
private _hash: string;
|
||||
poolDigest: string;
|
||||
private _params: WorkerInitParams;
|
||||
private _didFail = false;
|
||||
|
||||
|
|
@ -42,6 +43,7 @@ export class WorkerHost extends ProcessHost {
|
|||
this.workerIndex = workerIndex;
|
||||
this.parallelIndex = parallelIndex;
|
||||
this._hash = testGroup.workerHash;
|
||||
this.poolDigest = testGroup.firstPoolDigest;
|
||||
|
||||
this._params = {
|
||||
workerIndex: this.workerIndex,
|
||||
|
|
|
|||
|
|
@ -179,15 +179,30 @@ export class FixtureRunner {
|
|||
setPool(pool: FixturePool) {
|
||||
if (!this.testScopeClean)
|
||||
throw new Error('Did not teardown test scope');
|
||||
if (this.pool && pool.digest !== this.pool.digest) {
|
||||
throw new Error([
|
||||
`Playwright detected inconsistent test.use() options.`,
|
||||
`Most common mistakes that lead to this issue:`,
|
||||
` - Calling test.use() outside of the test file, for example in a common helper.`,
|
||||
` - One test file imports from another test file.`,
|
||||
].join('\n'));
|
||||
if (!this.pool) {
|
||||
this.pool = pool;
|
||||
return;
|
||||
}
|
||||
this.pool = pool;
|
||||
|
||||
// Teardown fixtures in the reverse order.
|
||||
const fixtures = Array.from(this.instanceForId.values()).reverse();
|
||||
const collector = new Set<Fixture>();
|
||||
for (const fixture of fixtures) {
|
||||
// Note: compare id instead of instances to allow identical registrations to be reused,
|
||||
// for example from a single test.use() applied to multiple tests.
|
||||
if (pool.resolve(fixture.registration.name)?.id !== fixture.registration.id)
|
||||
fixture._collectFixturesInTeardownOrder('worker', collector);
|
||||
}
|
||||
|
||||
if (!collector.size) {
|
||||
this.pool = pool;
|
||||
return;
|
||||
}
|
||||
|
||||
return async (testInfo: TestInfoImpl, runnable: RunnableDescription) => {
|
||||
await this._teardownFixtureList(collector, testInfo, runnable);
|
||||
this.pool = pool;
|
||||
};
|
||||
}
|
||||
|
||||
private _collectFixturesInSetupOrder(registration: FixtureRegistration, collector: Set<FixtureRegistration>) {
|
||||
|
|
@ -200,26 +215,33 @@ export class FixtureRunner {
|
|||
collector.add(registration);
|
||||
}
|
||||
|
||||
async teardownScope(scope: FixtureScope, testInfo: TestInfoImpl, runnable: RunnableDescription) {
|
||||
// Teardown fixtures in the reverse order.
|
||||
const fixtures = Array.from(this.instanceForId.values()).reverse();
|
||||
const collector = new Set<Fixture>();
|
||||
for (const fixture of fixtures)
|
||||
fixture._collectFixturesInTeardownOrder(scope, collector);
|
||||
private async _teardownFixtureList(fixtures: Set<Fixture>, testInfo: TestInfoImpl, runnable: RunnableDescription) {
|
||||
let firstError: Error | undefined;
|
||||
for (const fixture of collector) {
|
||||
for (const fixture of fixtures) {
|
||||
try {
|
||||
await fixture.teardown(testInfo, runnable);
|
||||
} catch (error) {
|
||||
firstError = firstError ?? error;
|
||||
}
|
||||
}
|
||||
if (scope === 'test')
|
||||
this.testScopeClean = true;
|
||||
if (firstError)
|
||||
throw firstError;
|
||||
}
|
||||
|
||||
async teardownScope(scope: FixtureScope, testInfo: TestInfoImpl, runnable: RunnableDescription) {
|
||||
// Teardown fixtures in the reverse order.
|
||||
const fixtures = Array.from(this.instanceForId.values()).reverse();
|
||||
const collector = new Set<Fixture>();
|
||||
for (const fixture of fixtures)
|
||||
fixture._collectFixturesInTeardownOrder(scope, collector);
|
||||
try {
|
||||
await this._teardownFixtureList(collector, testInfo, runnable);
|
||||
} finally {
|
||||
if (scope === 'test')
|
||||
this.testScopeClean = true;
|
||||
}
|
||||
}
|
||||
|
||||
async resolveParametersForFunction(fn: Function, testInfo: TestInfoImpl, autoFixtures: 'worker' | 'test' | 'all-hooks-only', runnable: RunnableDescription): Promise<object | null> {
|
||||
const collector = new Set<FixtureRegistration>();
|
||||
|
||||
|
|
|
|||
|
|
@ -54,6 +54,7 @@ export class WorkerMain extends ProcessRunner {
|
|||
private _currentTest: TestInfoImpl | null = null;
|
||||
private _lastRunningTests: TestCase[] = [];
|
||||
private _totalRunningTests = 0;
|
||||
private _lastTestPoolDigest = '';
|
||||
// Suites that had their beforeAll hooks, but not afterAll hooks executed.
|
||||
// These suites still need afterAll hooks to be executed for the proper cleanup.
|
||||
// Contains dynamic annotations originated by modifiers with a callback, e.g. `test.skip(() => true)`.
|
||||
|
|
@ -230,7 +231,8 @@ export class WorkerMain extends ProcessRunner {
|
|||
const donePayload: DonePayload = {
|
||||
fatalErrors: this._fatalErrors,
|
||||
skipTestsDueToSetupFailure: [],
|
||||
fatalUnknownTestIds
|
||||
fatalUnknownTestIds,
|
||||
lastTestPoolDigest: this._lastTestPoolDigest,
|
||||
};
|
||||
for (const test of this._skipRemainingTestsInSuite?.allTests() || []) {
|
||||
if (entries.has(test.id))
|
||||
|
|
@ -266,9 +268,6 @@ export class WorkerMain extends ProcessRunner {
|
|||
}
|
||||
};
|
||||
|
||||
if (!this._isStopped)
|
||||
this._fixtureRunner.setPool(test._pool!);
|
||||
|
||||
const suites = getSuites(test);
|
||||
const reversedSuites = suites.slice().reverse();
|
||||
const nextSuites = new Set(getSuites(nextTest));
|
||||
|
|
@ -333,6 +332,18 @@ export class WorkerMain extends ProcessRunner {
|
|||
|
||||
let testFunctionParams: object | null = null;
|
||||
await testInfo._runAsStage({ title: 'Before Hooks', stepInfo: { category: 'hook' } }, async () => {
|
||||
const resetFixturesCallback = this._fixtureRunner.setPool(test._pool!);
|
||||
if (resetFixturesCallback) {
|
||||
await testInfo._runAsStage({ title: 'Reset Fixtures', stepInfo: { category: 'hook' } }, async () => {
|
||||
// Tear down worker fixtures from the previous test that are incompatible with this test.
|
||||
// Unfortunately, we do not know about new worker fixtures at the end of previous test.
|
||||
// Give it a separate timeout - not sure what else could it be.
|
||||
const fixturesResetSlot = { timeout: this._project.project.timeout, elapsed: 0 };
|
||||
await resetFixturesCallback(testInfo, { type: 'test', slot: fixturesResetSlot });
|
||||
});
|
||||
}
|
||||
this._lastTestPoolDigest = test._pool!.digest;
|
||||
|
||||
// Run "beforeAll" hooks, unless already run during previous tests.
|
||||
for (const suite of suites)
|
||||
await this._runBeforeAllHooksForSuite(suite, testInfo);
|
||||
|
|
|
|||
|
|
@ -676,44 +676,6 @@ test('should not run user fn when require fixture has failed', async ({ runInlin
|
|||
]);
|
||||
});
|
||||
|
||||
test('should provide helpful error message when digests do not match', async ({ runInlineTest }) => {
|
||||
const result = await runInlineTest({
|
||||
'helper.ts': `
|
||||
import { test as base } from '@playwright/test';
|
||||
export * from '@playwright/test';
|
||||
export const test = base.extend({
|
||||
foo: [ async ({}, use) => use(), { scope: 'worker' } ],
|
||||
});
|
||||
|
||||
test.use({ foo: 'foo' });
|
||||
`,
|
||||
'a.spec.ts': `
|
||||
import { test, expect } from './helper';
|
||||
|
||||
test('test-a', ({ foo }) => {
|
||||
expect(foo).toBe('foo');
|
||||
});
|
||||
`,
|
||||
'b.spec.ts': `
|
||||
import { test, expect } from './helper';
|
||||
|
||||
test('test-b', ({ foo }) => {
|
||||
expect(foo).toBe('foo');
|
||||
});
|
||||
`,
|
||||
'c.spec.ts': `
|
||||
import { test, expect } from './helper';
|
||||
|
||||
test('test-c', ({ foo }) => {
|
||||
expect(foo).toBe('foo');
|
||||
});
|
||||
`,
|
||||
}, { workers: 1 });
|
||||
expect(result.exitCode).toBe(1);
|
||||
expect(result.failed).toBe(1);
|
||||
expect(result.output).toContain('Playwright detected inconsistent test.use() options.');
|
||||
});
|
||||
|
||||
test('tear down base fixture after error in derived', async ({ runInlineTest }) => {
|
||||
const result = await runInlineTest({
|
||||
'a.test.ts': `
|
||||
|
|
|
|||
|
|
@ -664,7 +664,7 @@ test('should not create a new worker for test fixtures', async ({ runInlineTest
|
|||
expect(result.passed).toBe(3);
|
||||
});
|
||||
|
||||
test('should create a new worker for worker fixtures', async ({ runInlineTest }) => {
|
||||
test('should not create a new worker for worker fixtures', async ({ runInlineTest }) => {
|
||||
const result = await runInlineTest({
|
||||
'a.test.ts': `
|
||||
import { test, expect } from '@playwright/test';
|
||||
|
|
@ -699,7 +699,7 @@ test('should create a new worker for worker fixtures', async ({ runInlineTest })
|
|||
expect(result.output).toContain('bar-b');
|
||||
const baseWorker = +result.output.match(/%%base-(\d)/)![1];
|
||||
expect(result.output).toContain(`%%base-${baseWorker}`);
|
||||
expect(result.output).toContain(`%%a-${1 - baseWorker}`);
|
||||
expect(result.output).toContain(`%%a-${baseWorker}`);
|
||||
expect(result.output).toContain(`%%b-${baseWorker}`);
|
||||
expect(result.passed).toBe(3);
|
||||
});
|
||||
|
|
@ -816,3 +816,54 @@ test('automatic worker fixtures should start before automatic test fixtures', as
|
|||
'WORKER FIXTURE 2',
|
||||
]);
|
||||
});
|
||||
|
||||
test('should teardown necessary worker fixtures when switching pools', async ({ runInlineTest }) => {
|
||||
const result = await runInlineTest({
|
||||
'a.spec.ts': `
|
||||
import { test as original, expect } from '@playwright/test';
|
||||
const base = original.extend({
|
||||
base: [async ({}, use) => {
|
||||
console.log('\\n%%base-setup');
|
||||
await use('base');
|
||||
console.log('\\n%%base-teardown');
|
||||
}, { scope: 'worker' }],
|
||||
});
|
||||
|
||||
const worker1 = base.extend({
|
||||
worker1: [async ({ base }, use) => {
|
||||
console.log('\\n%%worker1-setup');
|
||||
await use(base + '1');
|
||||
console.log('\\n%%worker1-teardown');
|
||||
}, { scope: 'worker' }],
|
||||
});
|
||||
|
||||
const worker2 = base.extend({
|
||||
worker2: [async ({ base }, use) => {
|
||||
console.log('\\n%%worker2-setup');
|
||||
await use(base + '2');
|
||||
console.log('\\n%%worker2-teardown');
|
||||
}, { scope: 'worker' }],
|
||||
});
|
||||
|
||||
worker1('test1', async ({ base, worker1 }) => {
|
||||
console.log('\\n%%test1-' + base + '-' + worker1);
|
||||
});
|
||||
|
||||
worker2('test2', async ({ base, worker2 }) => {
|
||||
console.log('\\n%%test2-' + base + '-' + worker2);
|
||||
});
|
||||
`,
|
||||
});
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.passed).toBe(2);
|
||||
expect(result.outputLines).toEqual([
|
||||
'base-setup',
|
||||
'worker1-setup',
|
||||
'test1-base-base1',
|
||||
'worker1-teardown',
|
||||
'worker2-setup',
|
||||
'test2-base-base2',
|
||||
'worker2-teardown',
|
||||
'base-teardown',
|
||||
]);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -46,8 +46,11 @@ test('test.extend should work', async ({ runInlineTest }) => {
|
|||
global.logs.push('beforeAll-' + suffix);
|
||||
await run();
|
||||
global.logs.push('afterAll-' + suffix);
|
||||
if (suffix.includes('base'))
|
||||
if (suffix.includes('base')) {
|
||||
global.logs.push('end of worker');
|
||||
console.log(global.logs.join('\\n'));
|
||||
global.logs = [];
|
||||
}
|
||||
}, { scope: 'worker' }],
|
||||
|
||||
baseTest: async ({ suffix, derivedWorker }, run) => {
|
||||
|
|
@ -85,10 +88,6 @@ test('test.extend should work', async ({ runInlineTest }) => {
|
|||
'afterEach-e1',
|
||||
'afterEach-base1',
|
||||
'afterAll-e1',
|
||||
'afterAll-base1',
|
||||
].join('\n'));
|
||||
expect(output).toContain([
|
||||
'beforeAll-base1',
|
||||
'beforeAll-e2',
|
||||
'beforeEach-base1',
|
||||
'beforeEach-e2',
|
||||
|
|
@ -97,6 +96,7 @@ test('test.extend should work', async ({ runInlineTest }) => {
|
|||
'afterEach-base1',
|
||||
'afterAll-e2',
|
||||
'afterAll-base1',
|
||||
'end of worker',
|
||||
].join('\n'));
|
||||
expect(output).toContain([
|
||||
'beforeAll-base2',
|
||||
|
|
@ -107,10 +107,6 @@ test('test.extend should work', async ({ runInlineTest }) => {
|
|||
'afterEach-e1',
|
||||
'afterEach-base2',
|
||||
'afterAll-e1',
|
||||
'afterAll-base2',
|
||||
].join('\n'));
|
||||
expect(output).toContain([
|
||||
'beforeAll-base2',
|
||||
'beforeAll-e2',
|
||||
'beforeEach-base2',
|
||||
'beforeEach-e2',
|
||||
|
|
@ -119,6 +115,7 @@ test('test.extend should work', async ({ runInlineTest }) => {
|
|||
'afterEach-base2',
|
||||
'afterAll-e2',
|
||||
'afterAll-base2',
|
||||
'end of worker',
|
||||
].join('\n'));
|
||||
});
|
||||
|
||||
|
|
|
|||
|
|
@ -1267,3 +1267,61 @@ hook |After Hooks
|
|||
});
|
||||
expect(exitCode).toBe(0);
|
||||
});
|
||||
|
||||
test('should teardown necessary worker fixtures when switching pools', async ({ runInlineTest }) => {
|
||||
const result = await runInlineTest({
|
||||
'reporter.ts': stepIndentReporter,
|
||||
'playwright.config.ts': `module.exports = { reporter: './reporter' }`,
|
||||
'a.spec.ts': `
|
||||
import { test as original, expect } from '@playwright/test';
|
||||
const base = original.extend({
|
||||
base: [async ({}, use) => {
|
||||
await use('base');
|
||||
}, { scope: 'worker' }],
|
||||
});
|
||||
|
||||
const worker1 = base.extend({
|
||||
worker1: [async ({ base }, use) => {
|
||||
await use(base + '1');
|
||||
}, { scope: 'worker' }],
|
||||
});
|
||||
|
||||
const worker2 = base.extend({
|
||||
worker2: [async ({ base }, use) => {
|
||||
await use(base + '2');
|
||||
}, { scope: 'worker' }],
|
||||
});
|
||||
|
||||
worker1('test1', async ({ base, worker1 }) => {
|
||||
expect(base).toBe('base');
|
||||
expect(worker1).toBe('base1');
|
||||
});
|
||||
|
||||
worker2('test2', async ({ base, worker2 }) => {
|
||||
expect(base).toBe('base');
|
||||
expect(worker2).toBe('failure');
|
||||
});
|
||||
`,
|
||||
}, { reporter: '' });
|
||||
expect(result.exitCode).toBe(1);
|
||||
expect(stripAnsi(result.output)).toBe(`
|
||||
hook |Before Hooks
|
||||
fixture | fixture: base @ a.spec.ts:3
|
||||
fixture | fixture: worker1 @ a.spec.ts:9
|
||||
expect |expect.toBe @ a.spec.ts:22
|
||||
expect |expect.toBe @ a.spec.ts:23
|
||||
hook |After Hooks
|
||||
hook |Before Hooks
|
||||
hook | Reset Fixtures
|
||||
fixture | fixture: worker1 @ a.spec.ts:9
|
||||
fixture | fixture: worker2 @ a.spec.ts:15
|
||||
expect |expect.toBe @ a.spec.ts:27
|
||||
expect |expect.toBe @ a.spec.ts:28
|
||||
expect |↪ error: Error: expect(received).toBe(expected) // Object.is equality
|
||||
hook |After Hooks
|
||||
hook |Worker Cleanup
|
||||
fixture | fixture: worker2 @ a.spec.ts:15
|
||||
fixture | fixture: base @ a.spec.ts:3
|
||||
|Error: expect(received).toBe(expected) // Object.is equality
|
||||
`);
|
||||
});
|
||||
|
|
|
|||
|
|
@ -119,12 +119,12 @@ test('should run tests with different worker options', async ({ runInlineTest })
|
|||
|
||||
test('test1', ({ foo }, testInfo) => {
|
||||
expect(foo).toBe('bar');
|
||||
expect(testInfo.workerIndex).toBe(1);
|
||||
expect(testInfo.workerIndex).toBe(0);
|
||||
});
|
||||
|
||||
test('test2', ({ foo }, testInfo) => {
|
||||
expect(foo).toBe('bar');
|
||||
expect(testInfo.workerIndex).toBe(1);
|
||||
expect(testInfo.workerIndex).toBe(0);
|
||||
});
|
||||
`,
|
||||
'c.test.ts': `
|
||||
|
|
@ -132,7 +132,7 @@ test('should run tests with different worker options', async ({ runInlineTest })
|
|||
test.use({ foo: 'baz' });
|
||||
test('test2', ({ foo }, testInfo) => {
|
||||
expect(foo).toBe('baz');
|
||||
expect(testInfo.workerIndex).toBe(2);
|
||||
expect(testInfo.workerIndex).toBe(0);
|
||||
});
|
||||
`
|
||||
}, { workers: 1 });
|
||||
|
|
|
|||
Loading…
Reference in a new issue