chore: implement compilation cache (#20597)
commit 8b5e55d432 (parent 5dd2b1ef0c)
packages/playwright-test/src/common/compilationCache.ts (new file, 114 lines added)
@@ -0,0 +1,114 @@
+/**
+ * Copyright (c) Microsoft Corporation.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import crypto from 'crypto';
+import fs from 'fs';
+import os from 'os';
+import path from 'path';
+import { sourceMapSupport } from '../utilsBundle';
+import { isWorkerProcess } from './globals';
+
+export type MemoryCache = {
+  codePath: string;
+  sourceMapPath: string;
+  moduleUrl?: string;
+};
+
+const version = 13;
+const cacheDir = process.env.PWTEST_CACHE_DIR || path.join(os.tmpdir(), 'playwright-transform-cache');
+
+const sourceMaps: Map<string, string> = new Map();
+const memoryCache = new Map<string, MemoryCache>();
+
+Error.stackTraceLimit = 200;
+
+sourceMapSupport.install({
+  environment: 'node',
+  handleUncaughtExceptions: false,
+  retrieveSourceMap(source) {
+    if (!sourceMaps.has(source))
+      return null;
+    const sourceMapPath = sourceMaps.get(source)!;
+    if (!fs.existsSync(sourceMapPath))
+      return null;
+    return {
+      map: JSON.parse(fs.readFileSync(sourceMapPath, 'utf-8')),
+      url: source
+    };
+  }
+});
+
+function _innerAddToCompilationCache(filename: string, options: { codePath: string, sourceMapPath: string, moduleUrl?: string }) {
+  sourceMaps.set(options.moduleUrl || filename, options.sourceMapPath);
+  memoryCache.set(filename, options);
+}
+
+export function getFromCompilationCache(filename: string, code: string, moduleUrl?: string): { cachedCode?: string, addToCache?: (code: string, map?: any) => void } {
+  // First check the memory cache by filename, this cache will always work in the worker,
+  // because we just compiled this file in the loader.
+  const cache = memoryCache.get(filename);
+  if (cache?.codePath)
+    return { cachedCode: fs.readFileSync(cache.codePath, 'utf-8') };
+
+  if (isWorkerProcess())
+    throw new Error('Internal error: unexpected file imported in the worker: ' + filename);
+
+  // Then do the disk cache, this cache works between the Playwright Test runs.
+  const isModule = !!moduleUrl;
+  const cachePath = calculateCachePath(code, filename, isModule);
+  const codePath = cachePath + '.js';
+  const sourceMapPath = cachePath + '.map';
+  if (fs.existsSync(codePath)) {
+    _innerAddToCompilationCache(filename, { codePath, sourceMapPath, moduleUrl });
+    return { cachedCode: fs.readFileSync(codePath, 'utf8') };
+  }
+
+  return {
+    addToCache: (code: string, map: any) => {
+      fs.mkdirSync(path.dirname(cachePath), { recursive: true });
+      if (map)
+        fs.writeFileSync(sourceMapPath, JSON.stringify(map), 'utf8');
+      fs.writeFileSync(codePath, code, 'utf8');
+      _innerAddToCompilationCache(filename, { codePath, sourceMapPath, moduleUrl });
+    }
+  };
+}
+
+export function serializeCompilationCache(): any {
+  return {
+    sourceMaps: [...sourceMaps.entries()],
+    memoryCache: [...memoryCache.entries()],
+  };
+}
+
+export function initializeCompilationCache(payload: any) {
+  for (const entry of payload.sourceMaps)
+    sourceMaps.set(entry[0], entry[1]);
+  for (const entry of payload.memoryCache)
+    memoryCache.set(entry[0], entry[1]);
+}
+
+function calculateCachePath(content: string, filePath: string, isModule: boolean): string {
+  const hash = crypto.createHash('sha1')
+      .update(process.env.PW_TEST_SOURCE_TRANSFORM || '')
+      .update(isModule ? 'esm' : 'no_esm')
+      .update(content)
+      .update(filePath)
+      .update(String(version))
+      .digest('hex');
+  const fileName = path.basename(filePath, path.extname(filePath)).replace(/\W/g, '') + '_' + hash;
+  return path.join(cacheDir, hash[0] + hash[1], fileName);
+}
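The file above is the whole cache in one place: an in-memory index (memoryCache plus sourceMaps) and a content-addressed disk cache under cacheDir. The intended call pattern is the one transformHook adopts later in this diff: consult the cache first, compile only on a miss, then hand the result back through addToCache. A minimal caller sketch; compileWithCache and the Transpile type are illustrative names, and transpile stands in for the real Babel step:

    // Sketch only; not part of the patch.
    import { getFromCompilationCache } from './compilationCache';

    type Transpile = (code: string, filename: string) => { code?: string, map?: any };

    function compileWithCache(code: string, filename: string, transpile: Transpile, moduleUrl?: string): string {
      const { cachedCode, addToCache } = getFromCompilationCache(filename, code, moduleUrl);
      if (cachedCode)
        return cachedCode;            // memory hit (worker) or disk hit (between runs)
      const { code: compiled, map } = transpile(code, filename);
      if (compiled)
        addToCache!(compiled, map);   // writes the .js/.map pair and registers the source map
      return compiled || '';
    }

Note that addToCache is only returned on a miss and cachedCode only on a hit, hence the non-null assertion once the compile succeeds.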
@@ -42,3 +42,13 @@ export function currentExpectTimeout(options: { timeout?: number }) {
     defaultExpectTimeout = 5000;
   return defaultExpectTimeout;
 }
+
+let _isWorkerProcess = false;
+
+export function setIsWorkerProcess() {
+  _isWorkerProcess = true;
+}
+
+export function isWorkerProcess() {
+  return _isWorkerProcess;
+}
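Judging by the currentExpectTimeout context, this hunk is in the shared globals module. The worker flag is plain module state rather than an environment variable, and it is what lets getFromCompilationCache treat a disk-cache fallback inside a worker as an internal error. A small sketch of how the two sides meet; the relative import assumes a sibling module, as in compilationCache.ts above:

    // Sketch only; not part of the patch.
    import { setIsWorkerProcess, isWorkerProcess } from './globals';

    // In the worker bootstrap (see the WorkerMain hunk at the end of this diff):
    setIsWorkerProcess();

    // Elsewhere, e.g. in getFromCompilationCache, the flag gates behaviour:
    if (isWorkerProcess()) {
      // A memory-cache miss here is an internal error: workers should only ever
      // import files that the runner already compiled and shipped over IPC.
    }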
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+import { serializeCompilationCache } from './compilationCache';
 import type { FullConfigInternal, TestInfoError, TestStatus } from './types';
 
 export type ConfigCLIOverrides = {
@@ -39,6 +40,7 @@ export type SerializedConfig = {
   configFile: string | undefined;
   configDir: string;
   configCLIOverrides: ConfigCLIOverrides;
+  compilationCache: any;
 };
 
 export type TtyParams = {
@@ -126,6 +128,7 @@ export function serializeConfig(config: FullConfigInternal): SerializedConfig {
     configFile: config.configFile,
     configDir: config._internal.configDir,
     configCLIOverrides: config._internal.configCLIOverrides,
+    compilationCache: serializeCompilationCache(),
   };
   return result;
 }
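These hunks look like the IPC/config serialization side: SerializedConfig gains a compilationCache field that serializeConfig fills via serializeCompilationCache(), so the cache index travels to workers together with the config. A rough round-trip sketch, with made-up paths standing in for real cache entries:

    // Sketch only; the paths in the comment are invented examples.
    import { serializeCompilationCache, initializeCompilationCache } from './compilationCache';

    const payload = serializeCompilationCache();
    // payload is roughly:
    // {
    //   sourceMaps:  [['/repo/tests/example.spec.ts', '/tmp/playwright-transform-cache/ab/examplespec_<sha1>.map']],
    //   memoryCache: [['/repo/tests/example.spec.ts', { codePath: '<...>.js', sourceMapPath: '<...>.map' }]],
    // }

    // On the worker side (last hunks of this diff), the inverse call restores both maps:
    initializeCompilationCache(payload);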
@@ -14,8 +14,6 @@
  * limitations under the License.
  */
 
-import crypto from 'crypto';
-import os from 'os';
 import path from 'path';
 import fs from 'fs';
 import { sourceMapSupport, pirates } from '../utilsBundle';
@@ -26,10 +24,7 @@ import { tsConfigLoader } from '../third_party/tsconfig-loader';
 import Module from 'module';
 import type { BabelTransformFunction } from './babelBundle';
 import { fileIsModule } from '../util';
-
-const version = 13;
-const cacheDir = process.env.PWTEST_CACHE_DIR || path.join(os.tmpdir(), 'playwright-transform-cache');
-const sourceMaps: Map<string, string> = new Map();
+import { getFromCompilationCache } from './compilationCache';
 
 type ParsedTsConfigData = {
   absoluteBaseUrl: string;
@@ -38,36 +33,6 @@
 };
 const cachedTSConfigs = new Map<string, ParsedTsConfigData | undefined>();
 
-Error.stackTraceLimit = 200;
-
-sourceMapSupport.install({
-  environment: 'node',
-  handleUncaughtExceptions: false,
-  retrieveSourceMap(source) {
-    if (!sourceMaps.has(source))
-      return null;
-    const sourceMapPath = sourceMaps.get(source)!;
-    if (!fs.existsSync(sourceMapPath))
-      return null;
-    return {
-      map: JSON.parse(fs.readFileSync(sourceMapPath, 'utf-8')),
-      url: source
-    };
-  }
-});
-
-function calculateCachePath(content: string, filePath: string, isModule: boolean): string {
-  const hash = crypto.createHash('sha1')
-      .update(process.env.PW_TEST_SOURCE_TRANSFORM || '')
-      .update(isModule ? 'esm' : 'no_esm')
-      .update(content)
-      .update(filePath)
-      .update(String(version))
-      .digest('hex');
-  const fileName = path.basename(filePath, path.extname(filePath)).replace(/\W/g, '') + '_' + hash;
-  return path.join(cacheDir, hash[0] + hash[1], fileName);
-}
-
 function validateTsConfig(tsconfig: TsConfigLoaderResult): ParsedTsConfigData | undefined {
   if (!tsconfig.tsConfigPath || !tsconfig.baseUrl)
     return;
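The block removed above is the old copy of the source-map hook and the cache-path logic; both now live in compilationCache.ts. For reference, the cache key folds in every input that could change the transpiled output, so changing any of them invalidates the disk entry. A sketch that mirrors calculateCachePath, with an illustrative function name and the version constant inlined:

    // Sketch only; mirrors calculateCachePath from compilationCache.ts.
    import crypto from 'crypto';
    import path from 'path';

    function exampleCachePath(cacheDir: string, content: string, filePath: string, isModule: boolean): string {
      const hash = crypto.createHash('sha1')
          .update(process.env.PW_TEST_SOURCE_TRANSFORM || '')  // a custom preprocessor gets its own entries
          .update(isModule ? 'esm' : 'no_esm')                 // ESM and CJS output are cached separately
          .update(content)                                     // file contents
          .update(filePath)                                    // absolute file path
          .update(String(13))                                  // the `version` constant; bumping it invalidates everything
          .digest('hex');
      const fileName = path.basename(filePath, path.extname(filePath)).replace(/\W/g, '') + '_' + hash;
      // Entries shard into two-character directories, e.g. <cacheDir>/ab/examplespec_ab12<rest-of-sha1>.js
      return path.join(cacheDir, hash[0] + hash[1], fileName);
    }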
@@ -181,33 +146,26 @@ export function js2ts(resolved: string): string | undefined {
 
 export function transformHook(code: string, filename: string, moduleUrl?: string): string {
   // If we are not TypeScript and there is no applicable preprocessor - bail out.
-  const isModule = !!moduleUrl;
+  const { cachedCode, addToCache } = getFromCompilationCache(filename, code, moduleUrl);
+  if (cachedCode)
+    return cachedCode;
+
   const isTypeScript = filename.endsWith('.ts') || filename.endsWith('.tsx');
   const hasPreprocessor =
     process.env.PW_TEST_SOURCE_TRANSFORM &&
     process.env.PW_TEST_SOURCE_TRANSFORM_SCOPE &&
     process.env.PW_TEST_SOURCE_TRANSFORM_SCOPE.split(pathSeparator).some(f => filename.startsWith(f));
 
-  const cachePath = calculateCachePath(code, filename, isModule);
-  const codePath = cachePath + '.js';
-  const sourceMapPath = cachePath + '.map';
-  sourceMaps.set(moduleUrl || filename, sourceMapPath);
-  if (!process.env.PW_IGNORE_COMPILE_CACHE && fs.existsSync(codePath))
-    return fs.readFileSync(codePath, 'utf8');
   // We don't use any browserslist data, but babel checks it anyway.
   // Silence the annoying warning.
   process.env.BROWSERSLIST_IGNORE_OLD_DATA = 'true';
 
   try {
     const { babelTransform }: { babelTransform: BabelTransformFunction } = require('./babelBundle');
-    const result = babelTransform(filename, isTypeScript, isModule, hasPreprocessor ? scriptPreprocessor : undefined, [require.resolve('./tsxTransform')]);
-    if (result.code) {
-      fs.mkdirSync(path.dirname(cachePath), { recursive: true });
-      if (result.map)
-        fs.writeFileSync(sourceMapPath, JSON.stringify(result.map), 'utf8');
-      fs.writeFileSync(codePath, result.code, 'utf8');
-    }
-    return result.code || '';
+    const { code, map } = babelTransform(filename, isTypeScript, !!moduleUrl, hasPreprocessor ? scriptPreprocessor : undefined, [require.resolve('./tsxTransform')]);
+    if (code)
+      addToCache!(code, map);
+    return code || '';
   } catch (e) {
     // Re-throw error with a playwright-test stack
     // that could be filtered out.
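One detail kept as context in this hunk: the custom source transform only applies when the file sits under one of the PW_TEST_SOURCE_TRANSFORM_SCOPE roots. A small sketch with example values; the pathSeparator definition is an assumption about how this file defines it, not something visible in the diff:

    // Sketch only; env values and pathSeparator are assumed examples.
    const pathSeparator = process.platform === 'win32' ? ';' : ':';
    const filename = '/repo/tests/a.spec.ts';
    process.env.PW_TEST_SOURCE_TRANSFORM = '/repo/preprocess.js';
    process.env.PW_TEST_SOURCE_TRANSFORM_SCOPE = '/repo/tests:/repo/helpers';

    const hasPreprocessor =
        process.env.PW_TEST_SOURCE_TRANSFORM &&
        process.env.PW_TEST_SOURCE_TRANSFORM_SCOPE &&
        process.env.PW_TEST_SOURCE_TRANSFORM_SCOPE.split(pathSeparator).some(f => filename.startsWith(f));
    // hasPreprocessor is truthy here because the file lives under /repo/tests.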
@@ -18,7 +18,7 @@ import { colors, rimraf } from 'playwright-core/lib/utilsBundle';
 import util from 'util';
 import { debugTest, formatLocation, relativeFilePath, serializeError } from '../util';
 import type { TestBeginPayload, TestEndPayload, RunPayload, DonePayload, WorkerInitParams, TeardownErrorsPayload, TestOutputPayload } from '../common/ipc';
-import { setCurrentTestInfo } from '../common/globals';
+import { setCurrentTestInfo, setIsWorkerProcess } from '../common/globals';
 import { ConfigLoader } from '../common/configLoader';
 import type { Suite, TestCase } from '../common/test';
 import type { Annotation, FullConfigInternal, FullProjectInternal, TestInfoError } from '../common/types';
@@ -31,6 +31,7 @@ import { ProcessRunner } from '../common/process';
 import { loadTestFile } from '../common/testLoader';
 import { buildFileSuiteForProject, filterTestsRemoveEmptySuites } from '../common/suiteUtils';
 import { PoolBuilder } from '../common/poolBuilder';
+import { initializeCompilationCache } from '../common/compilationCache';
 
 const removeFolderAsync = util.promisify(rimraf);
 
@@ -65,6 +66,8 @@ export class WorkerMain extends ProcessRunner {
     super();
     process.env.TEST_WORKER_INDEX = String(params.workerIndex);
     process.env.TEST_PARALLEL_INDEX = String(params.parallelIndex);
+    setIsWorkerProcess();
+    initializeCompilationCache(params.config.compilationCache);
 
     this._params = params;
     this._fixtureRunner = new FixtureRunner();
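Putting the worker-side pieces together: the constructor flips the worker flag and rehydrates the cache index before any test file is imported, so later transforms inside the worker are pure memory-cache reads. A condensed sketch of just that ordering; bootstrapWorker is an illustrative name and the rest of the constructor work is omitted:

    // Sketch only; not the real WorkerMain constructor.
    import { setIsWorkerProcess } from '../common/globals';
    import { initializeCompilationCache } from '../common/compilationCache';

    function bootstrapWorker(params: { config: { compilationCache: any } }) {
      setIsWorkerProcess();                                        // enables the worker-only guard in getFromCompilationCache
      initializeCompilationCache(params.config.compilationCache);  // restores the sourceMaps and memoryCache maps
      // Only after this point does the worker import test files, so each
      // transform should resolve from the memory cache instead of recompiling.
    }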