test runner: remove dependencies on playwright (#3539)
This commit is contained in:
parent 18292325b6
commit 30f4c0c9fe

.github/workflows/auto_roll.yml (vendored, 2 changes)
@@ -27,7 +27,7 @@ jobs:
     # XVFB-RUN merges both STDOUT and STDERR, whereas we need only STDERR
     # Wrap `npm run` in a subshell to redirect STDERR to file.
     # Enable core dumps in the subshell.
-    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner test/ --jobs=1 --forbid-only --timeout=30000"
+    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner/cli test/ --jobs=1 --forbid-only --timeout=30000"
       env:
         BROWSER: ${{ matrix.browser }}
         DEBUG: "pw:*,-pw:wrapped*,-pw:test*"
.github/workflows/tests.yml (vendored, 10 changes)
@@ -37,7 +37,7 @@ jobs:
     # XVFB-RUN merges both STDOUT and STDERR, whereas we need only STDERR
     # Wrap `npm run` in a subshell to redirect STDERR to file.
     # Enable core dumps in the subshell.
-    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner test/ --jobs=1 --forbid-only --timeout=30000 && npm run coverage"
+    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner/cli test/ --jobs=1 --forbid-only --timeout=30000 && npm run coverage"
       env:
         BROWSER: ${{ matrix.browser }}
         DEBUG: "pw:*,-pw:wrapped*,-pw:test*"
@@ -68,7 +68,7 @@ jobs:
     - uses: microsoft/playwright-github-action@v1
     - run: npm ci
     - run: npm run build
-    - run: node test/runner test/ --jobs=1 --forbid-only --timeout=30000
+    - run: node test/runner/cli test/ --jobs=1 --forbid-only --timeout=30000
       env:
         BROWSER: ${{ matrix.browser }}
         DEBUG: "pw:*,-pw:wrapped*,-pw:test*"
@@ -102,7 +102,7 @@ jobs:
     - uses: microsoft/playwright-github-action@v1
     - run: npm ci
     - run: npm run build
-    - run: node test/runner test/ --jobs=1 --forbid-only --timeout=30000
+    - run: node test/runner/cli test/ --jobs=1 --forbid-only --timeout=30000
       shell: bash
       env:
         BROWSER: ${{ matrix.browser }}
@@ -159,7 +159,7 @@ jobs:
     # XVFB-RUN merges both STDOUT and STDERR, whereas we need only STDERR
     # Wrap `npm run` in a subshell to redirect STDERR to file.
     # Enable core dumps in the subshell.
-    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner test/ --jobs=1 --forbid-only --timeout=30000"
+    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner/cli test/ --jobs=1 --forbid-only --timeout=30000"
       if: ${{ always() }}
       env:
         BROWSER: ${{ matrix.browser }}
@@ -197,7 +197,7 @@ jobs:
     # XVFB-RUN merges both STDOUT and STDERR, whereas we need only STDERR
     # Wrap `npm run` in a subshell to redirect STDERR to file.
    # Enable core dumps in the subshell.
-    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner test/ --jobs=1 --forbid-only --timeout=30000"
+    - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- bash -c "ulimit -c unlimited && node test/runner/cli test/ --jobs=1 --forbid-only --timeout=30000"
       env:
         BROWSER: ${{ matrix.browser }}
         DEBUG: "pw:*,-pw:wrapped*,-pw:test*"
package.json (10 changes)
@@ -9,10 +9,10 @@
     "node": ">=10.15.0"
   },
   "scripts": {
-    "ctest": "cross-env BROWSER=chromium node test/runner test/",
-    "ftest": "cross-env BROWSER=firefox node test/runner test/",
-    "wtest": "cross-env BROWSER=webkit node test/runner test/",
-    "test": "npm run ctest && npm run ftest && npm run wtest",
+    "ctest": "cross-env BROWSER=chromium node test/runner/cli test/",
+    "ftest": "cross-env BROWSER=firefox node test/runner/cli test/",
+    "wtest": "cross-env BROWSER=webkit node test/runner/cli test/",
+    "test": "node test/runner/cli test/",
     "eslint": "[ \"$CI\" = true ] && eslint --quiet -f codeframe --ext js,ts ./src || eslint --ext js,ts ./src",
     "tsc": "tsc -p .",
     "tsc-installer": "tsc -p ./src/install/tsconfig.json",
@@ -28,7 +28,7 @@
     "generate-channels": "node utils/generate_channels.js",
     "typecheck-tests": "tsc -p ./test/",
     "roll-browser": "node utils/roll_browser.js",
-    "coverage": "node test/runner/checkCoverage.js",
+    "coverage": "node test/checkCoverage.js",
     "check-deps": "node utils/check_deps.js",
     "build-driver": "pkg --public --targets node12-linux-x64,node12-macos-x64,node12-win-x64 --out-path=drivers packages/playwright-driver/main.js"
   },
@@ -23,9 +23,8 @@ import { TestServer } from '../utils/testserver/';
 import { Connection } from '../lib/rpc/client/connection';
 import { Transport } from '../lib/rpc/transport';
 import { setUnderTest } from '../lib/helper';
-import { installCoverageHooks } from './runner/coverage';
-import { valueFromEnv } from './runner/utils';
-import { registerFixture, registerWorkerFixture, registerOption, registerOptionGenerator } from './runner/fixtures';
+import { installCoverageHooks } from './coverage';
+import { registerFixture, registerWorkerFixture, registerOption, registerOptionGenerator } from './runner';
 import './runner/builtin.fixtures';
 
 import {mkdtempAsync, removeFolderAsync} from './utils';
@@ -66,11 +65,11 @@ declare global {
 (global as any).LINUX = platform === 'linux';
 (global as any).WIN = platform === 'win32';
 
-registerWorkerFixture('httpService', async ({parallelIndex}, test) => {
+registerWorkerFixture('httpService', async ({}, test) => {
   const assetsPath = path.join(__dirname, 'assets');
   const cachedPath = path.join(__dirname, 'assets', 'cached');
 
-  const port = 8907 + parallelIndex * 2;
+  const port = 8907 + options.parallelIndex * 2;
   const server = await TestServer.create(assetsPath, port);
   server.enableHTTPCache(cachedPath);
 
@@ -108,7 +107,7 @@ registerWorkerFixture('defaultBrowserOptions', async({browserName}, test) => {
   });
 });
 
-registerWorkerFixture('playwright', async({parallelIndex, browserName}, test) => {
+registerWorkerFixture('playwright', async({browserName}, test) => {
   const {coverage, uninstall} = installCoverageHooks(browserName);
   if (options.WIRE) {
     const connection = new Connection();
@@ -138,7 +137,7 @@ registerWorkerFixture('playwright', async({parallelIndex, browserName}, test) =>
 
   async function teardownCoverage() {
     uninstall();
-    const coveragePath = path.join(path.join(__dirname, 'coverage-report'), 'coverage', parallelIndex + '.json');
+    const coveragePath = path.join(__dirname, 'coverage-report', options.parallelIndex + '.json');
     const coverageJSON = [...coverage.keys()].filter(key => coverage.get(key));
     await fs.promises.mkdir(path.dirname(coveragePath), { recursive: true });
     await fs.promises.writeFile(coveragePath, JSON.stringify(coverageJSON, undefined, 2), 'utf8');
@@ -211,3 +210,9 @@ registerOption('FFOX', ({browserName}) => browserName === 'firefox');
 registerOption('WEBKIT', ({browserName}) => browserName === 'webkit');
 registerOption('HEADLESS', ({}) => !!valueFromEnv('HEADLESS', true));
 registerOption('WIRE', ({}) => process.env.PWWIRE);
+
+function valueFromEnv(name, defaultValue) {
+  if (!(name in process.env))
+    return defaultValue;
+  return JSON.parse(process.env[name]);
+}
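A side note on the pattern above: parallelIndex is no longer a worker fixture argument but a generated option, so any worker-scoped resource can derive collision-free ports from options.parallelIndex. A minimal sketch in the style of httpService, assuming the same imports as this fixture file; the fixture name and port base are hypothetical:

    registerWorkerFixture('wsService', async ({}, test) => {
      // Workers are numbered 0, 1, 2, ... so each one gets its own port pair.
      const port = 9600 + options.parallelIndex * 2;
      const server = await TestServer.create(path.join(__dirname, 'assets'), port);
      await test(server);
      await server.stop();
    });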
@@ -42,7 +42,7 @@ if (browserName === 'webkit')
   api.delete('browserContext.emit("screencaststarted")');
   api.delete('browserContext.emit("screencaststopped")');
 
-const coverageDir = path.join(__dirname, '..', 'coverage-report', 'coverage');
+const coverageDir = path.join(__dirname, 'coverage-report');
 
 const coveredMethods = new Set();
 for (const file of getCoverageFiles(coverageDir)) {
@@ -15,7 +15,7 @@
  */
 
 import '../base.fixture';
-import { registerWorkerFixture } from '../runner/fixtures';
+import { registerWorkerFixture } from '../runner';
 
 registerWorkerFixture('browser', async ({browserType, defaultBrowserOptions}, test) => {
   const browser = await browserType.launch({
@@ -16,7 +16,7 @@
 
 import '../runner/builtin.fixtures';
 import '../base.fixture';
-import { registerFixture } from '../runner/fixtures';
+import { registerFixture } from '../runner';
 
 import fs from 'fs';
 import path from 'path';
@@ -64,17 +64,17 @@ function apiForBrowser(browserName) {
 const BROWSER_CONFIGS = [
   {
     name: 'Firefox',
-    events: require('../../lib/events').Events,
+    events: require('../lib/events').Events,
   },
   {
     name: 'WebKit',
-    events: require('../../lib/events').Events,
+    events: require('../lib/events').Events,
   },
   {
     name: 'Chromium',
     events: {
-      ...require('../../lib/events').Events,
-      ...require('../../lib/chromium/events').Events,
+      ...require('../lib/events').Events,
+      ...require('../lib/chromium/events').Events,
     }
   },
 ];
@@ -82,7 +82,7 @@ function apiForBrowser(browserName) {
 const events = browserConfig.events;
 // TODO: we should rethink our api.ts approach to ensure coverage and async stacks.
 const api = {
-  ...require('../../lib/rpc/client/api'),
+  ...require('../lib/rpc/client/api'),
 };
 
 const filteredKeys = Object.keys(api).filter(apiName => {
@@ -17,7 +17,7 @@
 
 import './runner/builtin.fixtures';
 import './base.fixture';
-import { registerFixture } from './runner/fixtures';
+import { registerFixture } from './runner';
 import fs from 'fs';
 import utils from './utils';
 import { BrowserType, Browser, BrowserContext, Page } from '..';
@@ -18,7 +18,7 @@
 import './runner/builtin.fixtures';
 import './base.fixture';
 
-import { registerFixture } from './runner/fixtures';
+import { registerFixture } from './runner';
 
 import path from 'path';
 import fs from 'fs';
@@ -15,7 +15,7 @@
  */
 
 import '../base.fixture';
-import { registerFixture } from '../runner/fixtures';
+import { registerFixture } from '../runner';
 import {ElectronApplication, ElectronLauncher, ElectronPage} from '../../electron-types';
 import path from 'path';
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 import './base.fixture';
-import { registerFixture } from './runner/fixtures';
+import { registerFixture } from './runner';
 
 import path from 'path';
 import {spawn, execSync} from 'child_process';
@@ -97,7 +97,7 @@ it.fail(options.CHROMIUM && !options.HEADLESS)('should exclude patterns', async
   await browser.close();
 });
 
-it('should use socks proxy', async ({ browserType, defaultBrowserOptions, parallelIndex }) => {
+it('should use socks proxy', async ({ browserType, defaultBrowserOptions }) => {
   const server = socks.createServer((info, accept, deny) => {
     let socket;
     if (socket = accept(true)) {
@@ -114,7 +114,7 @@ it('should use socks proxy', async ({ browserType, defaultBrowserOptions, parall
       ].join('\r\n'));
     }
   });
-  const socksPort = 9107 + parallelIndex * 2;
+  const socksPort = 9107 + options.parallelIndex * 2;
   server.listen(socksPort, 'localhost');
  server.useAuth(socks.auth.None());
@@ -1,48 +0,0 @@
-/**
- * Copyright Microsoft Corporation. All rights reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-const fs = require('fs');
-const os = require('os');
-const path = require('path');
-const util = require('util');
-const { mkdtempAsync, removeFolderAsync } = require('../utils');
-const { registerFixture, registerWorkerFixture } = require('./fixtures');
-const mkdirAsync = util.promisify(fs.mkdir.bind(fs));
-
-let workerId;
-let outputDir;
-
-registerWorkerFixture('parallelIndex', async ({}, test) => {
-  await test(workerId);
-});
-
-registerFixture('tmpDir', async ({}, test) => {
-  const tmpDir = await mkdtempAsync(path.join(os.tmpdir(), 'playwright-test-'));
-  await test(tmpDir);
-  await removeFolderAsync(tmpDir).catch(e => {});
-});
-
-registerWorkerFixture('outputDir', async ({}, test) => {
-  await mkdirAsync(outputDir, { recursive: true });
-  await test(outputDir);
-});
-
-function initializeWorker(options) {
-  workerId = options.workerId;
-  outputDir = options.outputDir;
-}
-
-module.exports = { initializeWorker };
@@ -14,10 +14,23 @@
  * limitations under the License.
  */
 
-function valueFromEnv(name, defaultValue) {
-  if (!(name in process.env))
-    return defaultValue;
-  return JSON.parse(process.env[name]);
-}
-
-module.exports = { valueFromEnv };
+import os from 'os';
+import path from 'path';
+import { mkdtempAsync, removeFolderAsync } from '../utils';
+import { registerFixture } from '.';
+
+declare global {
+  interface Options {
+    parallelIndex: number;
+  }
+
+  interface WorkerState {
+    tmpDir: string;
+  }
+}
+
+registerFixture('tmpDir', async ({}, test) => {
+  const tmpDir = await mkdtempAsync(path.join(os.tmpdir(), 'playwright-test-'));
+  await test(tmpDir);
+  await removeFolderAsync(tmpDir).catch(e => {});
+});
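Since tmpDir is registered above and typed through the declare global block, specs can destructure it directly. A sketch of a consuming test, assuming the usual fs/path imports and the runner's global it; the spec body is illustrative:

    it('writes a scratch file', async ({ tmpDir }) => {
      // tmpDir is created fresh before the test and removed afterwards.
      await fs.promises.writeFile(path.join(tmpDir, 'scratch.txt'), 'hello', 'utf8');
    });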
test/runner/cli.js (new file, 110 lines)
@@ -0,0 +1,110 @@
+/**
+ * Copyright (c) Microsoft Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+const fs = require('fs');
+const path = require('path');
+const program = require('commander');
+const { Runner } = require('./runner');
+const { TestCollector } = require('./testCollector');
+
+program
+  .version('Version ' + require('../../package.json').version)
+  .option('--forbid-only', 'Fail if exclusive test(s) encountered', false)
+  .option('-g, --grep <grep>', 'Only run tests matching this string or regexp', '.*')
+  .option('-j, --jobs <jobs>', 'Number of concurrent jobs for --parallel; use 1 to run in serial, default: (number of CPU cores / 2)', Math.ceil(require('os').cpus().length / 2))
+  .option('--reporter <reporter>', 'Specify reporter to use', '')
+  .option('--trial-run', 'Only collect the matching tests and report them as passing')
+  .option('--quiet', 'Suppress stdio', false)
+  .option('--debug', 'Run tests in-process for debugging', false)
+  .option('--output <outputDir>', 'Folder for output artifacts, default: test-results', path.join(process.cwd(), 'test-results'))
+  .option('--timeout <timeout>', 'Specify test timeout threshold (in milliseconds), default: 10000', 10000)
+  .option('-u, --update-snapshots', 'Use this flag to re-record every snapshot that fails during this test run')
+  .action(async (command) => {
+    // Collect files.
+    const testDir = path.join(process.cwd(), command.args[0]);
+    const files = collectFiles(testDir, '', command.args.slice(1));
+
+    const testCollector = new TestCollector(files, {
+      forbidOnly: command.forbidOnly || undefined,
+      grep: command.grep,
+      timeout: command.timeout,
+    });
+    const rootSuite = testCollector.suite;
+    if (command.forbidOnly && testCollector.hasOnly()) {
+      console.error('=====================================');
+      console.error(' --forbid-only found a focused test.');
+      console.error('=====================================');
+      process.exit(1);
+    }
+
+    const total = rootSuite.total();
+    if (!total) {
+      console.error('=================');
+      console.error(' no tests found.');
+      console.error('=================');
+      process.exit(1);
+    }
+
+    // Trial run does not need many workers, use one.
+    const jobs = (command.trialRun || command.debug) ? 1 : command.jobs;
+    const runner = new Runner(rootSuite, total, {
+      debug: command.debug,
+      quiet: command.quiet,
+      grep: command.grep,
+      jobs,
+      outputDir: command.output,
+      reporter: command.reporter,
+      retries: command.retries,
+      snapshotDir: path.join(testDir, '__snapshots__'),
+      testDir,
+      timeout: command.timeout,
+      trialRun: command.trialRun,
+      updateSnapshots: command.updateSnapshots
+    });
+    await runner.run(files);
+    await runner.stop();
+    process.exit(runner.stats.failures ? 1 : 0);
+  });
+
+program.parse(process.argv);
+
+function collectFiles(testDir, dir, filters) {
+  const fullDir = path.join(testDir, dir);
+  if (fs.statSync(fullDir).isFile())
+    return [fullDir];
+  const files = [];
+  for (const name of fs.readdirSync(fullDir)) {
+    if (fs.lstatSync(path.join(fullDir, name)).isDirectory()) {
+      files.push(...collectFiles(testDir, path.join(dir, name), filters));
+      continue;
+    }
+    if (!name.endsWith('spec.ts'))
+      continue;
+    const relativeName = path.join(dir, name);
+    const fullName = path.join(testDir, relativeName);
+    if (!filters.length) {
+      files.push(fullName);
+      continue;
+    }
+    for (const filter of filters) {
+      if (relativeName.includes(filter)) {
+        files.push(fullName);
+        break;
+      }
+    }
+  }
+  return files;
+}
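cli.js is normally invoked from the command line, as the workflow and package.json changes above show (node test/runner/cli test/ --jobs=1 --timeout=30000). The same classes can also be driven programmatically; a minimal sketch based only on what this file exercises, with a hypothetical spec path:

    const path = require('path');
    const { Runner } = require('./runner');
    const { TestCollector } = require('./testCollector');

    (async () => {
      const testDir = path.join(process.cwd(), 'test');
      const files = [path.join(testDir, 'page.spec.ts')]; // hypothetical spec file
      const collector = new TestCollector(files, { grep: '.*', timeout: 10000 });
      const runner = new Runner(collector.suite, collector.suite.total(), {
        jobs: 1,
        outputDir: path.join(process.cwd(), 'test-results'),
        reporter: '',
        snapshotDir: path.join(testDir, '__snapshots__'),
        testDir,
        timeout: 10000,
      });
      await runner.run(files);
      await runner.stop();
      process.exit(runner.stats.failures ? 1 : 0);
    })();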
@@ -14,7 +14,6 @@
  * limitations under the License.
  */
 
-const crypto = require('crypto');
 const debug = require('debug');
 
 const registrations = new Map();
@@ -250,15 +249,4 @@ function rerunRegistrations(file, scope) {
     registrations.set(registration.name, registration);
 }
 
-function computeWorkerHash(file) {
-  // At this point, registrationsByFile contains all the files with worker fixture registrations.
-  // For every test, build the require closure and map each file to fixtures declared in it.
-  // This collection of fixtures is the fingerprint of the worker setup, a "worker hash".
-  // Tests with the matching "worker hash" will reuse the same worker.
-  const hash = crypto.createHash('sha1');
-  for (const registration of lookupRegistrations(file, 'worker').values())
-    hash.update(registration.location);
-  return hash.digest('hex');
-}
-
-module.exports = { FixturePool, registerFixture, registerWorkerFixture, computeWorkerHash, rerunRegistrations, lookupRegistrations, fixturesForCallback, registerOption, registerOptionGenerator, setOptions, optionRegistrations, options };
+module.exports = { FixturePool, registerFixture, registerWorkerFixture, rerunRegistrations, lookupRegistrations, fixturesForCallback, registerOption, registerOptionGenerator, setOptions, optionRegistrations, options };
@@ -1,11 +1,11 @@
 /**
- * Copyright (c) Microsoft Corporation.
+ * Copyright Microsoft Corporation. All rights reserved.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
@@ -14,97 +14,6 @@
  * limitations under the License.
  */
 
-const fs = require('fs');
-const path = require('path');
-const program = require('commander');
-const { Runner } = require('./runner');
-const { TestCollector } = require('./testCollector');
-const { options, registerFixture, registerWorkerFixture, registerOption, registerOptionGenerator } = require('./fixtures');
-
-program
-  .version('Version ' + require('../../package.json').version)
-  .option('--forbid-only', 'Fail if exclusive test(s) encountered', false)
-  .option('-g, --grep <grep>', 'Only run tests matching this string or regexp', '.*')
-  .option('-j, --jobs <jobs>', 'Number of concurrent jobs for --parallel; use 1 to run in serial, default: (number of CPU cores / 2)', Math.ceil(require('os').cpus().length / 2))
-  .option('--reporter <reporter>', 'Specify reporter to use', '')
-  .option('--trial-run', 'Only collect the matching tests and report them as passing')
-  .option('--quiet', 'Suppress stdio', false)
-  .option('--debug', 'Run tests in-process for debugging', false)
-  .option('--output <outputDir>', 'Folder for output artifacts, default: test-results', path.join(process.cwd(), 'test-results'))
-  .option('--timeout <timeout>', 'Specify test timeout threshold (in milliseconds), default: 10000', 10000)
-  .option('-u, --update-snapshots', 'Use this flag to re-record every snapshot that fails during this test run')
-  .action(async (command) => {
-    // Collect files]
-    const testDir = path.join(process.cwd(), command.args[0]);
-    const files = collectFiles(testDir, '', command.args.slice(1));
-
-    const testCollector = new TestCollector(files, {
-      forbidOnly: command.forbidOnly || undefined,
-      grep: command.grep,
-      timeout: command.timeout,
-    });
-    const rootSuite = testCollector.suite;
-    if (command.forbidOnly && testCollector.hasOnly()) {
-      console.error('=====================================');
-      console.error(' --forbid-only found a focused test.');
-      console.error('=====================================');
-      process.exit(1);
-    }
-
-    const total = rootSuite.total();
-    if (!total) {
-      console.error('=================');
-      console.error(' no tests found.');
-      console.error('=================');
-      process.exit(1);
-    }
-
-    // Trial run does not need many workers, use one.
-    const jobs = (command.trialRun || command.debug) ? 1 : command.jobs;
-    const runner = new Runner(rootSuite, total, {
-      debug: command.debug,
-      quiet: command.quiet,
-      grep: command.grep,
-      jobs,
-      outputDir: command.output,
-      reporter: command.reporter,
-      retries: command.retries,
-      snapshotDir: path.join(testDir, '__snapshots__'),
-      testDir,
-      timeout: command.timeout,
-      trialRun: command.trialRun,
-      updateSnapshots: command.updateSnapshots
-    });
-    await runner.run(files);
-    await runner.stop();
-    process.exit(runner.stats.failures ? 1 : 0);
-  });
-
-program.parse(process.argv);
-
-function collectFiles(testDir, dir, filters) {
-  const fullDir = path.join(testDir, dir);
-  if (fs.statSync(fullDir).isFile())
-    return [fullDir];
-  const files = [];
-  for (const name of fs.readdirSync(fullDir)) {
-    if (fs.lstatSync(path.join(fullDir, name)).isDirectory()) {
-      files.push(...collectFiles(testDir, path.join(dir, name), filters));
-      continue;
-    }
-    if (!name.endsWith('spec.ts'))
-      continue;
-    const relativeName = path.join(dir, name);
-    const fullName = path.join(testDir, relativeName);
-    if (!filters.length) {
-      files.push(fullName);
-      continue;
-    }
-    for (const filter of filters) {
-      if (relativeName.includes(filter)) {
-        files.push(fullName);
-        break;
-      }
-    }
-  }
-  return files;
-}
+module.exports = { options, registerFixture, registerWorkerFixture, registerOption, registerOptionGenerator };
@@ -15,12 +15,13 @@
  */
 
 const child_process = require('child_process');
+const crypto = require('crypto');
 const path = require('path');
 const { EventEmitter } = require('events');
 const Mocha = require('mocha');
 const builtinReporters = require('mocha/lib/reporters');
 const DotRunner = require('./dotReporter');
-const { computeWorkerHash } = require('./fixtures');
+const { lookupRegistrations } = require('./fixtures');
 
 const constants = Mocha.Runner.constants;
 // Mocha runner does not remove uncaughtException listeners.
@@ -263,15 +264,13 @@ class InProcessWorker extends EventEmitter {
 
   async init() {
     const { initializeImageMatcher } = require('./testRunner');
-    const { initializeWorker } = require('./builtin.fixtures');
     initializeImageMatcher(this.runner._options);
-    initializeWorker({ ...this.runner._options.outputDir, workerId: 0 });
   }
 
   async run(entry) {
     delete require.cache[entry.file];
     const { TestRunner } = require('./testRunner');
-    const testRunner = new TestRunner(entry, this.runner._options);
+    const testRunner = new TestRunner(entry, this.runner._options, 0);
     for (const event of ['test', 'pending', 'pass', 'fail', 'done'])
       testRunner.on(event, this.emit.bind(this, event));
     testRunner.run();
@@ -297,4 +296,15 @@ function chunkFromParams(params) {
   return Buffer.from(params.buffer, 'base64');
 }
 
+function computeWorkerHash(file) {
+  // At this point, registrationsByFile contains all the files with worker fixture registrations.
+  // For every test, build the require closure and map each file to fixtures declared in it.
+  // This collection of fixtures is the fingerprint of the worker setup, a "worker hash".
+  // Tests with the matching "worker hash" will reuse the same worker.
+  const hash = crypto.createHash('sha1');
+  for (const registration of lookupRegistrations(file, 'worker').values())
+    hash.update(registration.location);
+  return hash.digest('hex');
+}
+
 module.exports = { Runner };
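The comments in computeWorkerHash above describe the grouping rule: a spec's worker hash is the SHA-1 over the locations of every worker fixture its require closure registers, so specs with identical worker-fixture sets can share a worker process. A hypothetical illustration, with made-up file paths:

    // Both specs pull in base.fixture and register no extra worker fixtures,
    // so their hashes match and the runner may queue them into one worker.
    const a = computeWorkerHash('/repo/test/page.spec.ts');
    const b = computeWorkerHash('/repo/test/frame.spec.ts');
    console.log(a === b); // true when the worker fixture sets are identical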
@@ -27,7 +27,7 @@ const GoldenUtils = require('./GoldenUtils');
 class NullReporter {}
 
 class TestRunner extends EventEmitter {
-  constructor(entry, options) {
+  constructor(entry, options, workerId) {
     super();
     this.mocha = new Mocha({
       reporter: NullReporter,
@@ -53,20 +53,27 @@ class TestRunner extends EventEmitter {
     this._parsedGeneratorConfiguration = new Map();
     for (const {name, value} of this._configurationObject)
       this._parsedGeneratorConfiguration.set(name, value);
+    this._parsedGeneratorConfiguration.set('parallelIndex', workerId);
     this._relativeTestFile = path.relative(options.testDir, this._file);
     this.mocha.addFile(this._file);
   }
 
+  async stop() {
+    this._trialRun = true;
+    const constants = Mocha.Runner.constants;
+    return new Promise(f => this._runner.once(constants.EVENT_RUN_END, f));
+  }
+
   async run() {
     let callback;
     const result = new Promise(f => callback = f);
     setOptions(this._parsedGeneratorConfiguration);
     this.mocha.loadFiles();
     rerunRegistrations(this._file, 'test');
-    const runner = this.mocha.run(callback);
+    this._runner = this.mocha.run(callback);
 
     const constants = Mocha.Runner.constants;
-    runner.on(constants.EVENT_TEST_BEGIN, test => {
+    this._runner.on(constants.EVENT_TEST_BEGIN, test => {
       relativeTestFile = this._relativeTestFile;
       if (this._failedWithError)
         return;
@@ -77,7 +84,7 @@ class TestRunner extends EventEmitter {
       this.emit('test', { test: this._serializeTest(test, ordinal) });
     });
 
-    runner.on(constants.EVENT_TEST_PENDING, test => {
+    this._runner.on(constants.EVENT_TEST_PENDING, test => {
       if (this._failedWithError)
         return;
       const ordinal = ++this._currentOrdinal;
@@ -88,7 +95,7 @@ class TestRunner extends EventEmitter {
       this.emit('pending', { test: this._serializeTest(test, ordinal) });
     });
 
-    runner.on(constants.EVENT_TEST_PASS, test => {
+    this._runner.on(constants.EVENT_TEST_PASS, test => {
       if (this._failedWithError)
         return;
 
@@ -99,7 +106,7 @@ class TestRunner extends EventEmitter {
       this.emit('pass', { test: this._serializeTest(test, ordinal) });
     });
 
-    runner.on(constants.EVENT_TEST_FAIL, (test, error) => {
+    this._runner.on(constants.EVENT_TEST_FAIL, (test, error) => {
      if (this._failedWithError)
         return;
       ++this._failures;
@@ -110,12 +117,12 @@ class TestRunner extends EventEmitter {
       });
     });
 
-    runner.once(constants.EVENT_RUN_END, async () => {
+    this._runner.once(constants.EVENT_RUN_END, async () => {
       this.emit('done', {
-        stats: this._serializeStats(runner.stats),
+        stats: this._serializeStats(this._runner.stats),
         error: this._failedWithError,
         remaining: [...this._remaining],
-        total: runner.stats.tests
+        total: this._runner.stats.tests
       });
     });
     await result;
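Taken together, the testRunner.js changes above stash the mocha runner on this._runner so the new stop() can abort mid-file: it flips _trialRun, which turns the remaining tests into no-ops, then resolves once mocha emits EVENT_RUN_END. A sketch of consuming the event stream, with entry/options/workerId standing in for the real parameters:

    const { TestRunner } = require('./testRunner');

    async function runAndReport(entry, options, workerId) {
      const testRunner = new TestRunner(entry, options, workerId);
      testRunner.on('fail', ({ test }) => console.error('failed:', test));
      testRunner.on('done', ({ total, remaining }) => {
        // 'remaining' lists tests left unfinished after a hard failure.
        console.log(`${total} ran, ${remaining.length} remaining`);
      });
      await testRunner.run();
    }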
@@ -14,9 +14,7 @@
  * limitations under the License.
  */
 
 const { gracefullyCloseAll } = require('../../lib/server/processLauncher');
 const { TestRunner, initializeImageMatcher, fixturePool } = require('./testRunner');
-const { initializeWorker } = require('./builtin.fixtures');
-
 const util = require('util');
 
@@ -44,22 +42,25 @@ process.on('disconnect', gracefullyCloseAndExit);
 process.on('SIGINT',() => {});
 process.on('SIGTERM',() => {});
 
+let workerId;
+let testRunner;
+
 process.on('message', async message => {
   if (message.method === 'init') {
-    initializeWorker(message.params);
+    workerId = message.params.workerId;
     initializeImageMatcher(message.params);
     return;
   }
   if (message.method === 'stop') {
     await fixturePool.teardownScope('worker');
     await gracefullyCloseAndExit();
     return;
   }
   if (message.method === 'run') {
-    const testRunner = new TestRunner(message.params.entry, message.params.options);
+    testRunner = new TestRunner(message.params.entry, message.params.options, workerId);
     for (const event of ['test', 'pending', 'pass', 'fail', 'done'])
       testRunner.on(event, sendMessageToParent.bind(null, event));
     await testRunner.run();
+    testRunner = null;
     // Mocha runner adds these; if we don't remove them, we'll get a leak.
     process.removeAllListeners('uncaughtException');
   }
@@ -72,7 +73,9 @@ async function gracefullyCloseAndExit() {
   // Force exit after 30 seconds.
   setTimeout(() => process.exit(0), 30000);
   // Meanwhile, try to gracefully close all browsers.
   await gracefullyCloseAll();
+  if (testRunner)
+    await testRunner.stop();
   await fixturePool.teardownScope('worker');
   process.exit(0);
 }
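For context, the worker above is driven over Node's child-process IPC with 'init', 'run', and 'stop' messages. A sketch of the parent side as runner.js might issue them; the params shown and the shape of the replies are assumptions:

    const { fork } = require('child_process');

    const worker = fork(require.resolve('./worker'));
    worker.send({ method: 'init', params: { workerId: 0 } });
    worker.send({ method: 'run', params: { entry: { file: '/repo/test/page.spec.ts' }, options: {} } });
    worker.on('message', message => {
      // sendMessageToParent forwards 'test'/'pass'/'fail'/'done' events here.
      if (message.method === 'done')
        worker.send({ method: 'stop' });
    });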
@@ -16,7 +16,7 @@
 
 import './runner/builtin.fixtures';
 import './base.fixture';
-import { registerFixture } from './runner/fixtures';
+import { registerFixture } from './runner';
 import { Page } from '..';
 
 import fs from 'fs';
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-import { registerFixture } from './runner/fixtures';
+import { registerFixture } from './runner';
 
 declare global {
   interface FixtureState {
@@ -15,7 +15,7 @@
  */
 
 import './test-runner-helper';
-import { registerFixture } from './runner/fixtures';
+import { registerFixture } from './runner';
 
 registerFixture('helperFixture', async ({}, test) => {
   await test('helperFixture - overridden');
test/types.d.ts (vendored, 2 changes)
@@ -32,8 +32,6 @@ type ItFunction<STATE> = ((name: string, inner: (state: STATE) => Promise<void>)
 };
 
 interface WorkerState {
-  parallelIndex: number;
-  tmpDir: string;
 }
 
 interface FixtureState {