chore: short-cut localUtils usage in JS client (#34690)

Pavel Feldman 2025-02-10 14:19:58 -08:00 committed by GitHub
parent ad6444e14c
commit 2718ce7cbf
GPG key ID: B5690EEEBB952194
26 changed files with 549 additions and 399 deletions
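The client-side LocalUtils channel owner now exposes plain async methods that run the shared helpers from utils/localUtils in-process, so call sites drop the ._channel hop (only connect still goes over the wire). A minimal TypeScript sketch of that delegation pattern; the names LocalUtilsSketch and sharedHarClose are illustrative only and the types are simplified:

// Shared local implementation, analogous to the new utils/localUtils helpers.
export function sharedHarClose(harBackends: Map<string, { dispose(): void }>, params: { harId: string }): void {
  const harBackend = harBackends.get(params.harId);
  if (harBackend) {
    harBackends.delete(params.harId);
    harBackend.dispose();
  }
}

// Client-side owner, analogous to client/localUtils.ts after this change:
// the method short-cuts to the local helper instead of this._channel.harClose().
export class LocalUtilsSketch {
  private _harBackends = new Map<string, { dispose(): void }>();

  async harClose(params: { harId: string }): Promise<void> {
    sharedHarClose(this._harBackends, params);
  }
}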

View file

@@ -8,7 +8,7 @@
**
[inprocess.ts]
common/
utils/
[outofprocess.ts]
client/

View file

@@ -31,7 +31,7 @@ import type { Page } from './page';
import type * as types from './types';
import type * as api from '../../types/types';
import type { AndroidServerLauncherImpl } from '../androidServerImpl';
import type { Platform } from '../common/platform';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
type Direction = 'down' | 'up' | 'left' | 'right';
@@ -72,7 +72,7 @@ export class Android extends ChannelOwner<channels.AndroidChannel> implements ap
const headers = { 'x-playwright-browser': 'android', ...options.headers };
const localUtils = this._connection.localUtils();
const connectParams: channels.LocalUtilsConnectParams = { wsEndpoint, headers, slowMo: options.slowMo, timeout: options.timeout };
const { pipe } = await localUtils._channel.connect(connectParams);
const { pipe } = await localUtils.connect(connectParams);
const closePipe = () => pipe.close().catch(() => {});
const connection = new Connection(localUtils, this._platform, this._instrumentation);
connection.markAsRemote();

View file

@@ -45,8 +45,8 @@ import type { BrowserType } from './browserType';
import type { BrowserContextOptions, Headers, LaunchOptions, StorageState, WaitForEventOptions } from './types';
import type * as structs from '../../types/structs';
import type * as api from '../../types/types';
import type { Platform } from '../common/platform';
import type { URLMatch } from '../utils/isomorphic/urlMatch';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
export class BrowserContext extends ChannelOwner<channels.BrowserContextChannel> implements api.BrowserContext {
@@ -485,7 +485,7 @@ export class BrowserContext extends ChannelOwner<channels.BrowserContextChannel>
const needCompressed = harParams.path.endsWith('.zip');
if (isCompressed && !needCompressed) {
await artifact.saveAs(harParams.path + '.tmp');
await this._connection.localUtils()._channel.harUnzip({ zipFile: harParams.path + '.tmp', harFile: harParams.path });
await this._connection.localUtils().harUnzip({ zipFile: harParams.path + '.tmp', harFile: harParams.path });
} else {
await artifact.saveAs(harParams.path);
}

View file

@@ -133,7 +133,7 @@ export class BrowserType extends ChannelOwner<channels.BrowserTypeChannel> imple
};
if ((params as any).__testHookRedirectPortForwarding)
connectParams.socksProxyRedirectPortForTest = (params as any).__testHookRedirectPortForwarding;
const { pipe, headers: connectHeaders } = await localUtils._channel.connect(connectParams);
const { pipe, headers: connectHeaders } = await localUtils.connect(connectParams);
const closePipe = () => pipe.close().catch(() => {});
const connection = new Connection(localUtils, this._platform, this._instrumentation);
connection.markAsRemote();

View file

@@ -24,8 +24,8 @@ import { zones } from '../utils/zones';
import type { ClientInstrumentation } from './clientInstrumentation';
import type { Connection } from './connection';
import type { Logger } from './types';
import type { Platform } from '../common/platform';
import type { ValidatorContext } from '../protocol/validator';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
type Listener = (...args: any[]) => void;

View file

@@ -18,7 +18,7 @@
import { isString } from '../utils/rtti';
import type * as types from './types';
import type { Platform } from '../common/platform';
import type { Platform } from '../utils/platform';
export function envObjectToArray(env: types.Env): { name: string, value: string }[] {
const result: { name: string, value: string }[] = [];

View file

@@ -47,8 +47,8 @@ import { formatCallLog, rewriteErrorMessage } from '../utils/stackTrace';
import { zones } from '../utils/zones';
import type { ClientInstrumentation } from './clientInstrumentation';
import type { Platform } from '../common/platform';
import type { ValidatorContext } from '../protocol/validator';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
class Root extends ChannelOwner<channels.RootChannel> {
@@ -142,7 +142,7 @@ export class Connection extends EventEmitter {
const location = frames[0] ? { file: frames[0].file, line: frames[0].line, column: frames[0].column } : undefined;
const metadata: channels.Metadata = { apiName, location, internal: !apiName, stepId };
if (this._tracingCount && frames && type !== 'LocalUtils')
this._localUtils?._channel.addStackToTracingNoReply({ callData: { stack: frames, id } }).catch(() => {});
this._localUtils?.addStackToTracingNoReply({ callData: { stack: frames, id } }).catch(() => {});
// We need to exit zones before calling into the server, otherwise
// when we receive events from the server, we would be in an API zone.
zones.empty().run(() => this.onmessage({ ...message, metadata }));

View file

@@ -18,7 +18,7 @@ import { JSHandle } from './jsHandle';
import { Page } from './page';
import type * as api from '../../types/types';
import type { Platform } from '../common/platform';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
type ConsoleMessageLocation = channels.BrowserContextConsoleEvent['location'];

View file

@@ -31,7 +31,7 @@ import type { Locator } from './locator';
import type { FilePayload, Rect, SelectOption, SelectOptionOptions } from './types';
import type * as structs from '../../types/structs';
import type * as api from '../../types/types';
import type { Platform } from '../common/platform';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
const pipelineAsync = promisify(pipeline);
@@ -192,12 +192,13 @@ export class ElementHandle<T extends Node = Node> extends JSHandle<T> implements
return value === undefined ? null : value;
}
async screenshot(options: Omit<channels.ElementHandleScreenshotOptions, 'mask'> & { path?: string, mask?: Locator[] } = {}): Promise<Buffer> {
async screenshot(options: Omit<channels.ElementHandleScreenshotOptions, 'mask'> & { path?: string, mask?: api.Locator[] } = {}): Promise<Buffer> {
const mask = options.mask as Locator[] | undefined;
const copy: channels.ElementHandleScreenshotOptions = { ...options, mask: undefined };
if (!copy.type)
copy.type = determineScreenshotType(options);
if (options.mask) {
copy.mask = options.mask.map(locator => ({
if (mask) {
copy.mask = mask.map(locator => ({
frame: locator._frame._channel,
selector: locator._selector,
}));

View file

@@ -28,8 +28,8 @@ import type { Playwright } from './playwright';
import type { ClientCertificate, FilePayload, Headers, SetStorageState, StorageState } from './types';
import type { Serializable } from '../../types/structs';
import type * as api from '../../types/types';
import type { Platform } from '../common/platform';
import type { HeadersArray, NameValue } from '../common/types';
import type { Platform } from '../utils/platform';
import type * as channels from '@protocol/channels';
import type * as fs from 'fs';

View file

@@ -31,7 +31,7 @@ export class HarRouter {
private _options: { urlMatch?: URLMatch; baseURL?: string; };
static async create(localUtils: LocalUtils, file: string, notFoundAction: HarNotFoundAction, options: { urlMatch?: URLMatch }): Promise<HarRouter> {
const { harId, error } = await localUtils._channel.harOpen({ file });
const { harId, error } = await localUtils.harOpen({ file });
if (error)
throw new Error(error);
return new HarRouter(localUtils, harId!, notFoundAction, options);
@@ -47,7 +47,7 @@ export class HarRouter {
private async _handle(route: Route) {
const request = route.request();
const response = await this._localUtils._channel.harLookup({
const response = await this._localUtils.harLookup({
harId: this._harId,
url: request.url(),
method: request.method(),
@@ -103,6 +103,6 @@ export class HarRouter {
}
dispose() {
this._localUtils._channel.harClose({ harId: this._harId }).catch(() => {});
this._localUtils.harClose({ harId: this._harId }).catch(() => {});
}
}

View file

@@ -15,8 +15,10 @@
*/
import { ChannelOwner } from './channelOwner';
import * as localUtils from '../utils/localUtils';
import type { Size } from './types';
import type { HarBackend } from '../utils/harBackend';
import type * as channels from '@protocol/channels';
type DeviceDescriptor = {
@@ -31,6 +33,8 @@ type Devices = { [name: string]: DeviceDescriptor };
export class LocalUtils extends ChannelOwner<channels.LocalUtilsChannel> {
readonly devices: Devices;
private _harBackends = new Map<string, HarBackend>();
private _stackSessions = new Map<string, localUtils.StackSession>();
constructor(parent: ChannelOwner, type: string, guid: string, initializer: channels.LocalUtilsInitializer) {
super(parent, type, guid, initializer);
@@ -39,4 +43,40 @@ export class LocalUtils extends ChannelOwner<channels.LocalUtilsChannel> {
for (const { name, descriptor } of initializer.deviceDescriptors)
this.devices[name] = descriptor;
}
async zip(params: channels.LocalUtilsZipParams): Promise<void> {
return await localUtils.zip(this._platform, this._stackSessions, params);
}
async harOpen(params: channels.LocalUtilsHarOpenParams): Promise<channels.LocalUtilsHarOpenResult> {
return await localUtils.harOpen(this._harBackends, params);
}
async harLookup(params: channels.LocalUtilsHarLookupParams): Promise<channels.LocalUtilsHarLookupResult> {
return await localUtils.harLookup(this._harBackends, params);
}
async harClose(params: channels.LocalUtilsHarCloseParams): Promise<void> {
return await localUtils.harClose(this._harBackends, params);
}
async harUnzip(params: channels.LocalUtilsHarUnzipParams): Promise<void> {
return await localUtils.harUnzip(params);
}
async tracingStarted(params: channels.LocalUtilsTracingStartedParams): Promise<channels.LocalUtilsTracingStartedResult> {
return await localUtils.tracingStarted(this._stackSessions, params);
}
async traceDiscarded(params: channels.LocalUtilsTraceDiscardedParams): Promise<void> {
return await localUtils.traceDiscarded(this._platform, this._stackSessions, params);
}
async addStackToTracingNoReply(params: channels.LocalUtilsAddStackToTracingNoReplyParams): Promise<void> {
return await localUtils.addStackToTracingNoReply(this._stackSessions, params);
}
async connect(params: channels.LocalUtilsConnectParams): Promise<channels.LocalUtilsConnectResult> {
return await this._channel.connect(params);
}
}
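For reference, a hedged sketch of how a consumer such as HarRouter.create() uses these new wrappers; the relative import path and the helper name openHar are illustrative, not part of the diff:

import type { LocalUtils } from './localUtils';

export async function openHar(localUtils: LocalUtils, file: string): Promise<string> {
  // Resolves in-process via utils/localUtils instead of a channel round-trip.
  const { harId, error } = await localUtils.harOpen({ file });
  if (error)
    throw new Error(error);
  return harId!;
}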

View file

@@ -17,7 +17,7 @@
import { ChannelOwner } from './channelOwner';
import { evaluationScript } from './clientHelper';
import { setTestIdAttribute, testIdAttributeName } from './locator';
import { nodePlatform } from '../common/platform';
import { nodePlatform } from '../utils/platform';
import type { SelectorEngine } from './types';
import type * as api from '../../types/types';

View file

@@ -69,7 +69,7 @@ export class Tracing extends ChannelOwner<channels.TracingChannel> implements ap
this._isTracing = true;
this._connection.setIsTracing(true);
}
const result = await this._connection.localUtils()._channel.tracingStarted({ tracesDir: this._tracesDir, traceName });
const result = await this._connection.localUtils().tracingStarted({ tracesDir: this._tracesDir, traceName });
this._stacksId = result.stacksId;
}
@@ -89,7 +89,7 @@ export class Tracing extends ChannelOwner<channels.TracingChannel> implements ap
// Not interested in artifacts.
await this._channel.tracingStopChunk({ mode: 'discard' });
if (this._stacksId)
await this._connection.localUtils()._channel.traceDiscarded({ stacksId: this._stacksId });
await this._connection.localUtils().traceDiscarded({ stacksId: this._stacksId });
return;
}
@@ -97,7 +97,7 @@ export class Tracing extends ChannelOwner<channels.TracingChannel> implements ap
if (isLocal) {
const result = await this._channel.tracingStopChunk({ mode: 'entries' });
await this._connection.localUtils()._channel.zip({ zipFile: filePath, entries: result.entries!, mode: 'write', stacksId: this._stacksId, includeSources: this._includeSources });
await this._connection.localUtils().zip({ zipFile: filePath, entries: result.entries!, mode: 'write', stacksId: this._stacksId, includeSources: this._includeSources });
return;
}
@@ -106,7 +106,7 @@ export class Tracing extends ChannelOwner<channels.TracingChannel> implements ap
// The artifact may be missing if the browser closed while stopping tracing.
if (!result.artifact) {
if (this._stacksId)
await this._connection.localUtils()._channel.traceDiscarded({ stacksId: this._stacksId });
await this._connection.localUtils().traceDiscarded({ stacksId: this._stacksId });
return;
}
@@ -115,7 +115,7 @@ export class Tracing extends ChannelOwner<channels.TracingChannel> implements ap
await artifact.saveAs(filePath);
await artifact.delete();
await this._connection.localUtils()._channel.zip({ zipFile: filePath, entries: [], mode: 'append', stacksId: this._stacksId, includeSources: this._includeSources });
await this._connection.localUtils().zip({ zipFile: filePath, entries: [], mode: 'append', stacksId: this._stacksId, includeSources: this._includeSources });
}
_resetStackCounter() {

View file

@@ -1,3 +1,4 @@
[*]
../utils/
../utilsBundle.ts
../zipBundle.ts

View file

@@ -0,0 +1,23 @@
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
export interface Progress {
log(message: string): void;
timeUntilDeadline(): number;
isRunning(): boolean;
cleanupWhenAborted(cleanup: () => any): void;
throwIfAborted(): void;
}
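A minimal Progress implementation is easy to stub for callers outside a ProgressController. This is a hedged sketch; the deadline-based logic, the function name, and the import path are assumptions, not part of the diff:

import type { Progress } from '../common/progress';

export function createSimpleProgress(timeoutMs: number): Progress {
  const deadline = Date.now() + timeoutMs;
  return {
    log: message => console.log(message),
    timeUntilDeadline: () => Math.max(0, deadline - Date.now()),
    isRunning: () => Date.now() < deadline,
    cleanupWhenAborted: () => {},
    throwIfAborted: () => {
      if (Date.now() >= deadline)
        throw new Error('Timed out');
    },
  };
}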

View file

@@ -20,8 +20,8 @@ import { Connection } from './client/connection';
import { DispatcherConnection, PlaywrightDispatcher, RootDispatcher, createPlaywright } from './server';
import type { Playwright as PlaywrightAPI } from './client/playwright';
import type { Platform } from './common/platform';
import type { Language } from './utils';
import type { Platform } from './utils/platform';
export function createInProcessPlaywright(platform: Platform): PlaywrightAPI {
const playwright = createPlaywright({ sdkLanguage: (process.env.PW_LANG_NAME as Language | undefined) || 'javascript' });

View file

@@ -14,7 +14,7 @@
* limitations under the License.
*/
import { nodePlatform } from './common/platform';
import { createInProcessPlaywright } from './inProcessFactory';
import { nodePlatform } from './utils/platform';
module.exports = createInProcessPlaywright(nodePlatform);

View file

@@ -18,9 +18,9 @@ import * as childProcess from 'child_process';
import * as path from 'path';
import { Connection } from './client/connection';
import { nodePlatform } from './common/platform';
import { PipeTransport } from './protocol/transport';
import { ManualPromise } from './utils/manualPromise';
import { nodePlatform } from './utils/platform';
import type { Playwright } from './client/playwright';

View file

@@ -14,46 +14,27 @@
* limitations under the License.
*/
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { Dispatcher } from './dispatcher';
import { SdkObject } from '../../server/instrumentation';
import { assert, calculateSha1, createGuid } from '../../utils';
import { serializeClientSideCallMetadata } from '../../utils';
import { ManualPromise } from '../../utils/manualPromise';
import { fetchData } from '../../utils/network';
import * as localUtils from '../../utils/localUtils';
import { nodePlatform } from '../../utils/platform';
import { getUserAgent } from '../../utils/userAgent';
import { ZipFile } from '../../utils/zipFile';
import { yauzl, yazl } from '../../zipBundle';
import { deviceDescriptors as descriptors } from '../deviceDescriptors';
import { JsonPipeDispatcher } from '../dispatchers/jsonPipeDispatcher';
import { removeFolders } from '../fileUtils';
import { ProgressController } from '../progress';
import { SocksInterceptor } from '../socksInterceptor';
import { WebSocketTransport } from '../transport';
import type { HTTPRequestParams } from '../../utils/network';
import type { HarBackend } from '../../utils/harBackend';
import type { CallMetadata } from '../instrumentation';
import type { Playwright } from '../playwright';
import type { Progress } from '../progress';
import type { HeadersArray } from '../types';
import type { RootDispatcher } from './dispatcher';
import type * as channels from '@protocol/channels';
import type * as har from '@trace/har';
import type EventEmitter from 'events';
import type http from 'http';
export class LocalUtilsDispatcher extends Dispatcher<{ guid: string }, channels.LocalUtilsChannel, RootDispatcher> implements channels.LocalUtilsChannel {
_type_LocalUtils: boolean;
private _harBackends = new Map<string, HarBackend>();
private _stackSessions = new Map<string, {
file: string,
writer: Promise<void>,
tmpDir: string | undefined,
callStacks: channels.ClientSideCallMetadata[]
}>();
private _stackSessions = new Map<string, localUtils.StackSession>();
constructor(scope: RootDispatcher, playwright: Playwright) {
const localUtils = new SdkObject(playwright, 'localUtils', 'localUtils');
@@ -66,139 +47,35 @@ export class LocalUtilsDispatcher extends Dispatcher<{ guid: string }, channels.
}
async zip(params: channels.LocalUtilsZipParams): Promise<void> {
const promise = new ManualPromise<void>();
const zipFile = new yazl.ZipFile();
(zipFile as any as EventEmitter).on('error', error => promise.reject(error));
const addFile = (file: string, name: string) => {
try {
if (fs.statSync(file).isFile())
zipFile.addFile(file, name);
} catch (e) {
}
};
for (const entry of params.entries)
addFile(entry.value, entry.name);
// Add stacks and the sources.
const stackSession = params.stacksId ? this._stackSessions.get(params.stacksId) : undefined;
if (stackSession?.callStacks.length) {
await stackSession.writer;
if (process.env.PW_LIVE_TRACE_STACKS) {
zipFile.addFile(stackSession.file, 'trace.stacks');
} else {
const buffer = Buffer.from(JSON.stringify(serializeClientSideCallMetadata(stackSession.callStacks)));
zipFile.addBuffer(buffer, 'trace.stacks');
}
}
// Collect sources from stacks.
if (params.includeSources) {
const sourceFiles = new Set<string>();
for (const { stack } of stackSession?.callStacks || []) {
if (!stack)
continue;
for (const { file } of stack)
sourceFiles.add(file);
}
for (const sourceFile of sourceFiles)
addFile(sourceFile, 'resources/src@' + calculateSha1(sourceFile) + '.txt');
}
if (params.mode === 'write') {
// New file, just compress the entries.
await fs.promises.mkdir(path.dirname(params.zipFile), { recursive: true });
zipFile.end(undefined, () => {
zipFile.outputStream.pipe(fs.createWriteStream(params.zipFile))
.on('close', () => promise.resolve())
.on('error', error => promise.reject(error));
});
await promise;
await this._deleteStackSession(params.stacksId);
return;
}
// File already exists. Repack and add new entries.
const tempFile = params.zipFile + '.tmp';
await fs.promises.rename(params.zipFile, tempFile);
yauzl.open(tempFile, (err, inZipFile) => {
if (err) {
promise.reject(err);
return;
}
assert(inZipFile);
let pendingEntries = inZipFile.entryCount;
inZipFile.on('entry', entry => {
inZipFile.openReadStream(entry, (err, readStream) => {
if (err) {
promise.reject(err);
return;
}
zipFile.addReadStream(readStream!, entry.fileName);
if (--pendingEntries === 0) {
zipFile.end(undefined, () => {
zipFile.outputStream.pipe(fs.createWriteStream(params.zipFile)).on('close', () => {
fs.promises.unlink(tempFile).then(() => {
promise.resolve();
}).catch(error => promise.reject(error));
});
});
}
});
});
});
await promise;
await this._deleteStackSession(params.stacksId);
return await localUtils.zip(nodePlatform, this._stackSessions, params);
}
async harOpen(params: channels.LocalUtilsHarOpenParams, metadata: CallMetadata): Promise<channels.LocalUtilsHarOpenResult> {
let harBackend: HarBackend;
if (params.file.endsWith('.zip')) {
const zipFile = new ZipFile(params.file);
const entryNames = await zipFile.entries();
const harEntryName = entryNames.find(e => e.endsWith('.har'));
if (!harEntryName)
return { error: 'Specified archive does not have a .har file' };
const har = await zipFile.read(harEntryName);
const harFile = JSON.parse(har.toString()) as har.HARFile;
harBackend = new HarBackend(harFile, null, zipFile);
} else {
const harFile = JSON.parse(await fs.promises.readFile(params.file, 'utf-8')) as har.HARFile;
harBackend = new HarBackend(harFile, path.dirname(params.file), null);
}
this._harBackends.set(harBackend.id, harBackend);
return { harId: harBackend.id };
return await localUtils.harOpen(this._harBackends, params);
}
async harLookup(params: channels.LocalUtilsHarLookupParams, metadata: CallMetadata): Promise<channels.LocalUtilsHarLookupResult> {
const harBackend = this._harBackends.get(params.harId);
if (!harBackend)
return { action: 'error', message: `Internal error: har was not opened` };
return await harBackend.lookup(params.url, params.method, params.headers, params.postData, params.isNavigationRequest);
return await localUtils.harLookup(this._harBackends, params);
}
async harClose(params: channels.LocalUtilsHarCloseParams, metadata: CallMetadata): Promise<void> {
const harBackend = this._harBackends.get(params.harId);
if (harBackend) {
this._harBackends.delete(harBackend.id);
harBackend.dispose();
}
return await localUtils.harClose(this._harBackends, params);
}
async harUnzip(params: channels.LocalUtilsHarUnzipParams, metadata: CallMetadata): Promise<void> {
const dir = path.dirname(params.zipFile);
const zipFile = new ZipFile(params.zipFile);
for (const entry of await zipFile.entries()) {
const buffer = await zipFile.read(entry);
if (entry === 'har.har')
await fs.promises.writeFile(params.harFile, buffer);
else
await fs.promises.writeFile(path.join(dir, entry), buffer);
}
zipFile.close();
await fs.promises.unlink(params.zipFile);
return await localUtils.harUnzip(params);
}
async tracingStarted(params: channels.LocalUtilsTracingStartedParams, metadata?: CallMetadata | undefined): Promise<channels.LocalUtilsTracingStartedResult> {
return await localUtils.tracingStarted(this._stackSessions, params);
}
async traceDiscarded(params: channels.LocalUtilsTraceDiscardedParams, metadata?: CallMetadata | undefined): Promise<void> {
return await localUtils.traceDiscarded(nodePlatform, this._stackSessions, params);
}
async addStackToTracingNoReply(params: channels.LocalUtilsAddStackToTracingNoReplyParams, metadata?: CallMetadata | undefined): Promise<void> {
return await localUtils.addStackToTracingNoReply(this._stackSessions, params);
}
async connect(params: channels.LocalUtilsConnectParams, metadata: CallMetadata): Promise<channels.LocalUtilsConnectResult> {
@@ -210,7 +87,7 @@ export class LocalUtilsDispatcher extends Dispatcher<{ guid: string }, channels.
'x-playwright-proxy': params.exposeNetwork ?? '',
...params.headers,
};
const wsEndpoint = await urlToWSEndpoint(progress, params.wsEndpoint);
const wsEndpoint = await localUtils.urlToWSEndpoint(progress, params.wsEndpoint);
const transport = await WebSocketTransport.connect(progress, wsEndpoint, wsHeaders, true, 'x-playwright-debug-log');
const socksInterceptor = new SocksInterceptor(transport, params.exposeNetwork, params.socksProxyRedirectPortForTest);
@@ -241,221 +118,4 @@ export class LocalUtilsDispatcher extends Dispatcher<{ guid: string }, channels.
return { pipe, headers: transport.headers };
}, params.timeout || 0);
}
async tracingStarted(params: channels.LocalUtilsTracingStartedParams, metadata?: CallMetadata | undefined): Promise<channels.LocalUtilsTracingStartedResult> {
let tmpDir = undefined;
if (!params.tracesDir)
tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'playwright-tracing-'));
const traceStacksFile = path.join(params.tracesDir || tmpDir!, params.traceName + '.stacks');
this._stackSessions.set(traceStacksFile, { callStacks: [], file: traceStacksFile, writer: Promise.resolve(), tmpDir });
return { stacksId: traceStacksFile };
}
async traceDiscarded(params: channels.LocalUtilsTraceDiscardedParams, metadata?: CallMetadata | undefined): Promise<void> {
await this._deleteStackSession(params.stacksId);
}
async addStackToTracingNoReply(params: channels.LocalUtilsAddStackToTracingNoReplyParams, metadata?: CallMetadata | undefined): Promise<void> {
for (const session of this._stackSessions.values()) {
session.callStacks.push(params.callData);
if (process.env.PW_LIVE_TRACE_STACKS) {
session.writer = session.writer.then(() => {
const buffer = Buffer.from(JSON.stringify(serializeClientSideCallMetadata(session.callStacks)));
return fs.promises.writeFile(session.file, buffer);
});
}
}
}
private async _deleteStackSession(stacksId?: string) {
const session = stacksId ? this._stackSessions.get(stacksId) : undefined;
if (!session)
return;
await session.writer;
if (session.tmpDir)
await removeFolders([session.tmpDir]);
this._stackSessions.delete(stacksId!);
}
}
const redirectStatus = [301, 302, 303, 307, 308];
class HarBackend {
readonly id = createGuid();
private _harFile: har.HARFile;
private _zipFile: ZipFile | null;
private _baseDir: string | null;
constructor(harFile: har.HARFile, baseDir: string | null, zipFile: ZipFile | null) {
this._harFile = harFile;
this._baseDir = baseDir;
this._zipFile = zipFile;
}
async lookup(url: string, method: string, headers: HeadersArray, postData: Buffer | undefined, isNavigationRequest: boolean): Promise<{
action: 'error' | 'redirect' | 'fulfill' | 'noentry',
message?: string,
redirectURL?: string,
status?: number,
headers?: HeadersArray,
body?: Buffer }> {
let entry;
try {
entry = await this._harFindResponse(url, method, headers, postData);
} catch (e) {
return { action: 'error', message: 'HAR error: ' + e.message };
}
if (!entry)
return { action: 'noentry' };
// If navigation is being redirected, restart it with the final url to ensure the document's url changes.
if (entry.request.url !== url && isNavigationRequest)
return { action: 'redirect', redirectURL: entry.request.url };
const response = entry.response;
try {
const buffer = await this._loadContent(response.content);
return {
action: 'fulfill',
status: response.status,
headers: response.headers,
body: buffer,
};
} catch (e) {
return { action: 'error', message: e.message };
}
}
private async _loadContent(content: { text?: string, encoding?: string, _file?: string }): Promise<Buffer> {
const file = content._file;
let buffer: Buffer;
if (file) {
if (this._zipFile)
buffer = await this._zipFile.read(file);
else
buffer = await fs.promises.readFile(path.resolve(this._baseDir!, file));
} else {
buffer = Buffer.from(content.text || '', content.encoding === 'base64' ? 'base64' : 'utf-8');
}
return buffer;
}
private async _harFindResponse(url: string, method: string, headers: HeadersArray, postData: Buffer | undefined): Promise<har.Entry | undefined> {
const harLog = this._harFile.log;
const visited = new Set<har.Entry>();
while (true) {
const entries: har.Entry[] = [];
for (const candidate of harLog.entries) {
if (candidate.request.url !== url || candidate.request.method !== method)
continue;
if (method === 'POST' && postData && candidate.request.postData) {
const buffer = await this._loadContent(candidate.request.postData);
if (!buffer.equals(postData)) {
const boundary = multipartBoundary(headers);
if (!boundary)
continue;
const candidateBoundary = multipartBoundary(candidate.request.headers);
if (!candidateBoundary)
continue;
// Try to match multipart/form-data ignoring boundary as it changes between requests.
if (postData.toString().replaceAll(boundary, '') !== buffer.toString().replaceAll(candidateBoundary, ''))
continue;
}
}
entries.push(candidate);
}
if (!entries.length)
return;
let entry = entries[0];
// Disambiguate using headers - the one with the most matching headers wins.
if (entries.length > 1) {
const list: { candidate: har.Entry, matchingHeaders: number }[] = [];
for (const candidate of entries) {
const matchingHeaders = countMatchingHeaders(candidate.request.headers, headers);
list.push({ candidate, matchingHeaders });
}
list.sort((a, b) => b.matchingHeaders - a.matchingHeaders);
entry = list[0].candidate;
}
if (visited.has(entry))
throw new Error(`Found redirect cycle for ${url}`);
visited.add(entry);
// Follow redirects.
const locationHeader = entry.response.headers.find(h => h.name.toLowerCase() === 'location');
if (redirectStatus.includes(entry.response.status) && locationHeader) {
const locationURL = new URL(locationHeader.value, url);
url = locationURL.toString();
if ((entry.response.status === 301 || entry.response.status === 302) && method === 'POST' ||
entry.response.status === 303 && !['GET', 'HEAD'].includes(method)) {
// HTTP-redirect fetch step 13 (https://fetch.spec.whatwg.org/#http-redirect-fetch)
method = 'GET';
}
continue;
}
return entry;
}
}
dispose() {
this._zipFile?.close();
}
}
function countMatchingHeaders(harHeaders: har.Header[], headers: HeadersArray): number {
const set = new Set(headers.map(h => h.name.toLowerCase() + ':' + h.value));
let matches = 0;
for (const h of harHeaders) {
if (set.has(h.name.toLowerCase() + ':' + h.value))
++matches;
}
return matches;
}
export async function urlToWSEndpoint(progress: Progress|undefined, endpointURL: string): Promise<string> {
if (endpointURL.startsWith('ws'))
return endpointURL;
progress?.log(`<ws preparing> retrieving websocket url from ${endpointURL}`);
const fetchUrl = new URL(endpointURL);
if (!fetchUrl.pathname.endsWith('/'))
fetchUrl.pathname += '/';
fetchUrl.pathname += 'json';
const json = await fetchData({
url: fetchUrl.toString(),
method: 'GET',
timeout: progress?.timeUntilDeadline() ?? 30_000,
headers: { 'User-Agent': getUserAgent() },
}, async (params: HTTPRequestParams, response: http.IncomingMessage) => {
return new Error(`Unexpected status ${response.statusCode} when connecting to ${fetchUrl.toString()}.\n` +
`This does not look like a Playwright server, try connecting via ws://.`);
});
progress?.throwIfAborted();
const wsUrl = new URL(endpointURL);
let wsEndpointPath = JSON.parse(json).wsEndpointPath;
if (wsEndpointPath.startsWith('/'))
wsEndpointPath = wsEndpointPath.substring(1);
if (!wsUrl.pathname.endsWith('/'))
wsUrl.pathname += '/';
wsUrl.pathname += wsEndpointPath;
wsUrl.protocol = wsUrl.protocol === 'https:' ? 'wss:' : 'ws:';
return wsUrl.toString();
}
function multipartBoundary(headers: HeadersArray) {
const contentType = headers.find(h => h.name.toLowerCase() === 'content-type');
if (!contentType?.value.includes('multipart/form-data'))
return undefined;
const boundary = contentType.value.match(/boundary=(\S+)/);
if (boundary)
return boundary[1];
return undefined;
}

View file

@@ -19,14 +19,10 @@ import { assert, monotonicTime } from '../utils';
import { ManualPromise } from '../utils/manualPromise';
import type { CallMetadata, Instrumentation, SdkObject } from './instrumentation';
import type { Progress as CommonProgress } from '../common/progress';
import type { LogName } from '../utils/debugLogger';
export interface Progress {
log(message: string): void;
timeUntilDeadline(): number;
isRunning(): boolean;
cleanupWhenAborted(cleanup: () => any): void;
throwIfAborted(): void;
export interface Progress extends CommonProgress {
metadata: CallMetadata;
}

View file

@@ -14,7 +14,7 @@
* limitations under the License.
*/
import type { Platform } from '../common/platform';
import type { Platform } from './platform';
export const fileUploadSizeLimit = 50 * 1024 * 1024;
@@ -22,3 +22,9 @@ export async function mkdirIfNeeded(platform: Platform, filePath: string) {
// This will harmlessly throw on windows if the dirname is the root directory.
await platform.fs().promises.mkdir(platform.path().dirname(filePath), { recursive: true }).catch(() => {});
}
export async function removeFolders(platform: Platform, dirs: string[]): Promise<Error[]> {
return await Promise.all(dirs.map((dir: string) =>
platform.fs().promises.rm(dir, { recursive: true, force: true, maxRetries: 10 }).catch(e => e)
));
}
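A hedged usage sketch for the new removeFolders helper; the function name cleanupTraceDirs and the relative import paths are illustrative, while nodePlatform is imported as elsewhere in this commit:

import { removeFolders } from './fileUtils';
import { nodePlatform } from './platform';

export async function cleanupTraceDirs(dirs: string[]): Promise<void> {
  // Each entry resolves to the rm() error (if any) rather than rejecting.
  const errors = await removeFolders(nodePlatform, dirs);
  for (const error of errors.filter(Boolean))
    console.warn('Failed to remove folder:', error);
}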

View file

@@ -0,0 +1,175 @@
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as fs from 'fs';
import * as path from 'path';
import { createGuid } from './crypto';
import { ZipFile } from './zipFile';
import type { HeadersArray } from '../common/types';
import type * as har from '@trace/har';
const redirectStatus = [301, 302, 303, 307, 308];
export class HarBackend {
readonly id = createGuid();
private _harFile: har.HARFile;
private _zipFile: ZipFile | null;
private _baseDir: string | null;
constructor(harFile: har.HARFile, baseDir: string | null, zipFile: ZipFile | null) {
this._harFile = harFile;
this._baseDir = baseDir;
this._zipFile = zipFile;
}
async lookup(url: string, method: string, headers: HeadersArray, postData: Buffer | undefined, isNavigationRequest: boolean): Promise<{
action: 'error' | 'redirect' | 'fulfill' | 'noentry',
message?: string,
redirectURL?: string,
status?: number,
headers?: HeadersArray,
body?: Buffer }> {
let entry;
try {
entry = await this._harFindResponse(url, method, headers, postData);
} catch (e) {
return { action: 'error', message: 'HAR error: ' + e.message };
}
if (!entry)
return { action: 'noentry' };
// If navigation is being redirected, restart it with the final url to ensure the document's url changes.
if (entry.request.url !== url && isNavigationRequest)
return { action: 'redirect', redirectURL: entry.request.url };
const response = entry.response;
try {
const buffer = await this._loadContent(response.content);
return {
action: 'fulfill',
status: response.status,
headers: response.headers,
body: buffer,
};
} catch (e) {
return { action: 'error', message: e.message };
}
}
private async _loadContent(content: { text?: string, encoding?: string, _file?: string }): Promise<Buffer> {
const file = content._file;
let buffer: Buffer;
if (file) {
if (this._zipFile)
buffer = await this._zipFile.read(file);
else
buffer = await fs.promises.readFile(path.resolve(this._baseDir!, file));
} else {
buffer = Buffer.from(content.text || '', content.encoding === 'base64' ? 'base64' : 'utf-8');
}
return buffer;
}
private async _harFindResponse(url: string, method: string, headers: HeadersArray, postData: Buffer | undefined): Promise<har.Entry | undefined> {
const harLog = this._harFile.log;
const visited = new Set<har.Entry>();
while (true) {
const entries: har.Entry[] = [];
for (const candidate of harLog.entries) {
if (candidate.request.url !== url || candidate.request.method !== method)
continue;
if (method === 'POST' && postData && candidate.request.postData) {
const buffer = await this._loadContent(candidate.request.postData);
if (!buffer.equals(postData)) {
const boundary = multipartBoundary(headers);
if (!boundary)
continue;
const candidateBoundary = multipartBoundary(candidate.request.headers);
if (!candidateBoundary)
continue;
// Try to match multipart/form-data ignoring boundary as it changes between requests.
if (postData.toString().replaceAll(boundary, '') !== buffer.toString().replaceAll(candidateBoundary, ''))
continue;
}
}
entries.push(candidate);
}
if (!entries.length)
return;
let entry = entries[0];
// Disambiguate using headers - the one with the most matching headers wins.
if (entries.length > 1) {
const list: { candidate: har.Entry, matchingHeaders: number }[] = [];
for (const candidate of entries) {
const matchingHeaders = countMatchingHeaders(candidate.request.headers, headers);
list.push({ candidate, matchingHeaders });
}
list.sort((a, b) => b.matchingHeaders - a.matchingHeaders);
entry = list[0].candidate;
}
if (visited.has(entry))
throw new Error(`Found redirect cycle for ${url}`);
visited.add(entry);
// Follow redirects.
const locationHeader = entry.response.headers.find(h => h.name.toLowerCase() === 'location');
if (redirectStatus.includes(entry.response.status) && locationHeader) {
const locationURL = new URL(locationHeader.value, url);
url = locationURL.toString();
if ((entry.response.status === 301 || entry.response.status === 302) && method === 'POST' ||
entry.response.status === 303 && !['GET', 'HEAD'].includes(method)) {
// HTTP-redirect fetch step 13 (https://fetch.spec.whatwg.org/#http-redirect-fetch)
method = 'GET';
}
continue;
}
return entry;
}
}
dispose() {
this._zipFile?.close();
}
}
function countMatchingHeaders(harHeaders: har.Header[], headers: HeadersArray): number {
const set = new Set(headers.map(h => h.name.toLowerCase() + ':' + h.value));
let matches = 0;
for (const h of harHeaders) {
if (set.has(h.name.toLowerCase() + ':' + h.value))
++matches;
}
return matches;
}
function multipartBoundary(headers: HeadersArray) {
const contentType = headers.find(h => h.name.toLowerCase() === 'content-type');
if (!contentType?.value.includes('multipart/form-data'))
return undefined;
const boundary = contentType.value.match(/boundary=(\S+)/);
if (boundary)
return boundary[1];
return undefined;
}

View file

@@ -0,0 +1,248 @@
/**
* Copyright (c) Microsoft Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import { removeFolders } from './fileUtils';
import { HarBackend } from './harBackend';
import { ManualPromise } from './manualPromise';
import { fetchData } from './network';
import { getUserAgent } from './userAgent';
import { ZipFile } from './zipFile';
import { yauzl, yazl } from '../zipBundle';
import { serializeClientSideCallMetadata } from '.';
import { assert, calculateSha1 } from '.';
import type { HTTPRequestParams } from './network';
import type { Platform } from './platform';
import type { Progress } from '../common/progress';
import type * as channels from '@protocol/channels';
import type * as har from '@trace/har';
import type EventEmitter from 'events';
import type http from 'http';
export type StackSession = {
file: string;
writer: Promise<void>;
tmpDir: string | undefined;
callStacks: channels.ClientSideCallMetadata[];
};
export async function zip(platform: Platform, stackSessions: Map<string, StackSession>, params: channels.LocalUtilsZipParams): Promise<void> {
const promise = new ManualPromise<void>();
const zipFile = new yazl.ZipFile();
(zipFile as any as EventEmitter).on('error', error => promise.reject(error));
const addFile = (file: string, name: string) => {
try {
if (fs.statSync(file).isFile())
zipFile.addFile(file, name);
} catch (e) {
}
};
for (const entry of params.entries)
addFile(entry.value, entry.name);
// Add stacks and the sources.
const stackSession = params.stacksId ? stackSessions.get(params.stacksId) : undefined;
if (stackSession?.callStacks.length) {
await stackSession.writer;
if (process.env.PW_LIVE_TRACE_STACKS) {
zipFile.addFile(stackSession.file, 'trace.stacks');
} else {
const buffer = Buffer.from(JSON.stringify(serializeClientSideCallMetadata(stackSession.callStacks)));
zipFile.addBuffer(buffer, 'trace.stacks');
}
}
// Collect sources from stacks.
if (params.includeSources) {
const sourceFiles = new Set<string>();
for (const { stack } of stackSession?.callStacks || []) {
if (!stack)
continue;
for (const { file } of stack)
sourceFiles.add(file);
}
for (const sourceFile of sourceFiles)
addFile(sourceFile, 'resources/src@' + calculateSha1(sourceFile) + '.txt');
}
if (params.mode === 'write') {
// New file, just compress the entries.
await fs.promises.mkdir(path.dirname(params.zipFile), { recursive: true });
zipFile.end(undefined, () => {
zipFile.outputStream.pipe(fs.createWriteStream(params.zipFile))
.on('close', () => promise.resolve())
.on('error', error => promise.reject(error));
});
await promise;
await deleteStackSession(platform, stackSessions, params.stacksId);
return;
}
// File already exists. Repack and add new entries.
const tempFile = params.zipFile + '.tmp';
await fs.promises.rename(params.zipFile, tempFile);
yauzl.open(tempFile, (err, inZipFile) => {
if (err) {
promise.reject(err);
return;
}
assert(inZipFile);
let pendingEntries = inZipFile.entryCount;
inZipFile.on('entry', entry => {
inZipFile.openReadStream(entry, (err, readStream) => {
if (err) {
promise.reject(err);
return;
}
zipFile.addReadStream(readStream!, entry.fileName);
if (--pendingEntries === 0) {
zipFile.end(undefined, () => {
zipFile.outputStream.pipe(fs.createWriteStream(params.zipFile)).on('close', () => {
fs.promises.unlink(tempFile).then(() => {
promise.resolve();
}).catch(error => promise.reject(error));
});
});
}
});
});
});
await promise;
await deleteStackSession(platform, stackSessions, params.stacksId);
}
async function deleteStackSession(platform: Platform, stackSessions: Map<string, StackSession>, stacksId?: string) {
const session = stacksId ? stackSessions.get(stacksId) : undefined;
if (!session)
return;
await session.writer;
if (session.tmpDir)
await removeFolders(platform, [session.tmpDir]);
stackSessions.delete(stacksId!);
}
export async function harOpen(harBackends: Map<string, HarBackend>, params: channels.LocalUtilsHarOpenParams): Promise<channels.LocalUtilsHarOpenResult> {
let harBackend: HarBackend;
if (params.file.endsWith('.zip')) {
const zipFile = new ZipFile(params.file);
const entryNames = await zipFile.entries();
const harEntryName = entryNames.find(e => e.endsWith('.har'));
if (!harEntryName)
return { error: 'Specified archive does not have a .har file' };
const har = await zipFile.read(harEntryName);
const harFile = JSON.parse(har.toString()) as har.HARFile;
harBackend = new HarBackend(harFile, null, zipFile);
} else {
const harFile = JSON.parse(await fs.promises.readFile(params.file, 'utf-8')) as har.HARFile;
harBackend = new HarBackend(harFile, path.dirname(params.file), null);
}
harBackends.set(harBackend.id, harBackend);
return { harId: harBackend.id };
}
export async function harLookup(harBackends: Map<string, HarBackend>, params: channels.LocalUtilsHarLookupParams): Promise<channels.LocalUtilsHarLookupResult> {
const harBackend = harBackends.get(params.harId);
if (!harBackend)
return { action: 'error', message: `Internal error: har was not opened` };
return await harBackend.lookup(params.url, params.method, params.headers, params.postData, params.isNavigationRequest);
}
export async function harClose(harBackends: Map<string, HarBackend>, params: channels.LocalUtilsHarCloseParams): Promise<void> {
const harBackend = harBackends.get(params.harId);
if (harBackend) {
harBackends.delete(harBackend.id);
harBackend.dispose();
}
}
export async function harUnzip(params: channels.LocalUtilsHarUnzipParams): Promise<void> {
const dir = path.dirname(params.zipFile);
const zipFile = new ZipFile(params.zipFile);
for (const entry of await zipFile.entries()) {
const buffer = await zipFile.read(entry);
if (entry === 'har.har')
await fs.promises.writeFile(params.harFile, buffer);
else
await fs.promises.writeFile(path.join(dir, entry), buffer);
}
zipFile.close();
await fs.promises.unlink(params.zipFile);
}
export async function tracingStarted(stackSessions: Map<string, StackSession>, params: channels.LocalUtilsTracingStartedParams): Promise<channels.LocalUtilsTracingStartedResult> {
let tmpDir = undefined;
if (!params.tracesDir)
tmpDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'playwright-tracing-'));
const traceStacksFile = path.join(params.tracesDir || tmpDir!, params.traceName + '.stacks');
stackSessions.set(traceStacksFile, { callStacks: [], file: traceStacksFile, writer: Promise.resolve(), tmpDir });
return { stacksId: traceStacksFile };
}
export async function traceDiscarded(platform: Platform, stackSessions: Map<string, StackSession>, params: channels.LocalUtilsTraceDiscardedParams): Promise<void> {
await deleteStackSession(platform, stackSessions, params.stacksId);
}
export async function addStackToTracingNoReply(stackSessions: Map<string, StackSession>, params: channels.LocalUtilsAddStackToTracingNoReplyParams): Promise<void> {
for (const session of stackSessions.values()) {
session.callStacks.push(params.callData);
if (process.env.PW_LIVE_TRACE_STACKS) {
session.writer = session.writer.then(() => {
const buffer = Buffer.from(JSON.stringify(serializeClientSideCallMetadata(session.callStacks)));
return fs.promises.writeFile(session.file, buffer);
});
}
}
}
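A hedged consumer-side sketch of the stack-session lifecycle these helpers manage (create, then discard); the function name demoStackSession and the relative import paths are assumptions consistent with the rest of the commit:

import { tracingStarted, traceDiscarded } from './localUtils';
import type { StackSession } from './localUtils';
import { nodePlatform } from './platform';

export async function demoStackSession(): Promise<void> {
  const stackSessions = new Map<string, StackSession>();
  // Without tracesDir, tracingStarted() creates a playwright-tracing-* tmp dir.
  const { stacksId } = await tracingStarted(stackSessions, { traceName: 'demo-trace' });
  // ... stacks recorded via addStackToTracingNoReply() would accumulate here ...
  await traceDiscarded(nodePlatform, stackSessions, { stacksId });
}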
export async function urlToWSEndpoint(progress: Progress | undefined, endpointURL: string): Promise<string> {
if (endpointURL.startsWith('ws'))
return endpointURL;
progress?.log(`<ws preparing> retrieving websocket url from ${endpointURL}`);
const fetchUrl = new URL(endpointURL);
if (!fetchUrl.pathname.endsWith('/'))
fetchUrl.pathname += '/';
fetchUrl.pathname += 'json';
const json = await fetchData({
url: fetchUrl.toString(),
method: 'GET',
timeout: progress?.timeUntilDeadline() ?? 30_000,
headers: { 'User-Agent': getUserAgent() },
}, async (params: HTTPRequestParams, response: http.IncomingMessage) => {
return new Error(`Unexpected status ${response.statusCode} when connecting to ${fetchUrl.toString()}.\n` +
`This does not look like a Playwright server, try connecting via ws://.`);
});
progress?.throwIfAborted();
const wsUrl = new URL(endpointURL);
let wsEndpointPath = JSON.parse(json).wsEndpointPath;
if (wsEndpointPath.startsWith('/'))
wsEndpointPath = wsEndpointPath.substring(1);
if (!wsUrl.pathname.endsWith('/'))
wsUrl.pathname += '/';
wsUrl.pathname += wsEndpointPath;
wsUrl.protocol = wsUrl.protocol === 'https:' ? 'wss:' : 'ws:';
return wsUrl.toString();
}
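And a hedged usage sketch for urlToWSEndpoint; the server URL is illustrative, and passing undefined for progress falls back to the 30-second fetch timeout seen above:

import { urlToWSEndpoint } from './localUtils';

export async function resolveEndpoint(): Promise<string> {
  // Queries <endpointURL>/json and rewrites the result to a ws:// or wss:// URL.
  return await urlToWSEndpoint(undefined, 'http://localhost:3000/');
}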

View file

@@ -22,7 +22,7 @@ import type { Source } from '../../../packages/recorder/src/recorderTypes';
import type { CommonFixtures, TestChildProcess } from '../../config/commonFixtures';
import { stripAnsi } from '../../config/utils';
import { expect } from '@playwright/test';
import { nodePlatform } from '../../../packages/playwright-core/lib/common/platform';
import { nodePlatform } from '../../../packages/playwright-core/lib/utils/platform';
export { expect } from '@playwright/test';
type CLITestArgs = {