chore: simplify server screencast code (#4501)

Currently, we always throw from FrameSession._stopScreencast
when not running with video, and immediately catch it in
CRPage.didClose (thanks to the debugger for pointing that out).

Overall, we have code prepared for a start/stop API, which
we never shipped, so it makes sense to simplify the code a bit
and throw only if something actually goes wrong.
This commit is contained in:
Dmitry Gozman 2020-11-19 19:26:53 -08:00 committed by GitHub
parent 5e6eed0d87
commit aea106b28e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 34 additions and 78 deletions

View file

@ -217,14 +217,6 @@ export class CRPage implements PageDelegate {
await this._mainFrameSession._client.send('Emulation.setDefaultBackgroundColorOverride', { color }); await this._mainFrameSession._client.send('Emulation.setDefaultBackgroundColorOverride', { color });
} }
async startScreencast(options: types.PageScreencastOptions): Promise<void> {
await this._mainFrameSession._startScreencast(createGuid(), options);
}
async stopScreencast(): Promise<void> {
await this._mainFrameSession._stopScreencast();
}
async takeScreenshot(format: 'png' | 'jpeg', documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer> { async takeScreenshot(format: 'png' | 'jpeg', documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer> {
const { visualViewport } = await this._mainFrameSession._client.send('Page.getLayoutMetrics'); const { visualViewport } = await this._mainFrameSession._client.send('Page.getLayoutMetrics');
if (!documentRect) { if (!documentRect) {
@ -335,7 +327,6 @@ class FrameSession {
private _swappedIn = false; private _swappedIn = false;
private _videoRecorder: VideoRecorder | null = null; private _videoRecorder: VideoRecorder | null = null;
private _screencastId: string | null = null; private _screencastId: string | null = null;
private _screencastState: 'stopped' | 'starting' | 'started' = 'stopped';
constructor(crPage: CRPage, client: CRSession, targetId: string, parentSession: FrameSession | null) { constructor(crPage: CRPage, client: CRSession, targetId: string, parentSession: FrameSession | null) {
this._client = client; this._client = client;
@ -777,44 +768,30 @@ class FrameSession {
} }
async _startScreencast(screencastId: string, options: types.PageScreencastOptions): Promise<void> { async _startScreencast(screencastId: string, options: types.PageScreencastOptions): Promise<void> {
if (this._screencastState !== 'stopped') assert(!this._screencastId);
throw new Error('Already started'); this._videoRecorder = await VideoRecorder.launch(options);
const videoRecorder = await VideoRecorder.launch(options); this._screencastId = screencastId;
this._screencastState = 'starting'; const gotFirstFrame = new Promise(f => this._client.once('Page.screencastFrame', f));
try { await this._client.send('Page.startScreencast', {
this._screencastState = 'started'; format: 'jpeg',
this._videoRecorder = videoRecorder; quality: 90,
this._screencastId = screencastId; maxWidth: options.width,
this._crPage._browserContext._browser._videoStarted(this._crPage._browserContext, screencastId, options.outputFile, this._crPage.pageOrError()); maxHeight: options.height,
await Promise.all([ });
this._client.send('Page.startScreencast', { this._crPage._browserContext._browser._videoStarted(this._crPage._browserContext, screencastId, options.outputFile, this._crPage.pageOrError());
format: 'jpeg', await gotFirstFrame;
quality: 90,
maxWidth: options.width,
maxHeight: options.height,
}),
new Promise(f => this._client.once('Page.screencastFrame', f))
]);
} catch (e) {
videoRecorder.stop().catch(() => {});
throw e;
}
} }
async _stopScreencast(): Promise<void> { async _stopScreencast(): Promise<void> {
if (this._screencastState !== 'started') if (!this._screencastId)
throw new Error('No screencast in progress, current state: ' + this._screencastState); return;
try { await this._client._sendMayFail('Page.stopScreencast');
await this._client.send('Page.stopScreencast'); const recorder = this._videoRecorder!;
} finally { const screencastId = this._screencastId;
const recorder = this._videoRecorder!; this._videoRecorder = null;
const screencastId = this._screencastId!; this._screencastId = null;
this._videoRecorder = null; await recorder.stop().catch(() => {});
this._screencastId = null; this._crPage._browserContext._browser._videoFinished(screencastId);
this._screencastState = 'stopped';
await recorder.stop().catch(() => {});
this._crPage._browserContext._browser._videoFinished(screencastId);
}
} }
async _updateExtraHTTPHeaders(initial: boolean): Promise<void> { async _updateExtraHTTPHeaders(initial: boolean): Promise<void> {

View file

@ -380,18 +380,6 @@ export class FFPage implements PageDelegate {
throw new Error('Not implemented'); throw new Error('Not implemented');
} }
async startScreencast(options: types.PageScreencastOptions): Promise<void> {
this._session.send('Page.startVideoRecording', {
file: options.outputFile,
width: options.width,
height: options.height,
});
}
async stopScreencast(): Promise<void> {
await this._session.send('Page.stopVideoRecording');
}
async takeScreenshot(format: 'png' | 'jpeg', documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer> { async takeScreenshot(format: 'png' | 'jpeg', documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer> {
if (!documentRect) { if (!documentRect) {
const context = await this._page.mainFrame()._utilityContext(); const context = await this._page.mainFrame()._utilityContext();

View file

@ -61,8 +61,6 @@ export interface PageDelegate {
canScreenshotOutsideViewport(): boolean; canScreenshotOutsideViewport(): boolean;
resetViewport(): Promise<void>; // Only called if canScreenshotOutsideViewport() returns false. resetViewport(): Promise<void>; // Only called if canScreenshotOutsideViewport() returns false.
setBackgroundColor(color?: { r: number; g: number; b: number; a: number; }): Promise<void>; setBackgroundColor(color?: { r: number; g: number; b: number; a: number; }): Promise<void>;
startScreencast(options: types.PageScreencastOptions): Promise<void>;
stopScreencast(): Promise<void>;
takeScreenshot(format: string, documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer>; takeScreenshot(format: string, documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer>;
isElementHandle(remoteObject: any): boolean; isElementHandle(remoteObject: any): boolean;

View file

@ -119,7 +119,7 @@ export class WKPage implements PageDelegate {
const size = this._browserContext._options.recordVideo.size || this._browserContext._options.viewport || { width: 1280, height: 720 }; const size = this._browserContext._options.recordVideo.size || this._browserContext._options.viewport || { width: 1280, height: 720 };
const outputFile = path.join(this._browserContext._options.recordVideo.dir, createGuid() + '.webm'); const outputFile = path.join(this._browserContext._options.recordVideo.dir, createGuid() + '.webm');
promises.push(this._browserContext._ensureVideosPath().then(() => { promises.push(this._browserContext._ensureVideosPath().then(() => {
return this.startScreencast({ return this._startScreencast({
...size, ...size,
outputFile, outputFile,
}); });
@ -712,8 +712,7 @@ export class WKPage implements PageDelegate {
} }
async closePage(runBeforeUnload: boolean): Promise<void> { async closePage(runBeforeUnload: boolean): Promise<void> {
if (this._recordingVideoFile) await this._stopScreencast();
await this.stopScreencast();
await this._pageProxySession.sendMayFail('Target.close', { await this._pageProxySession.sendMayFail('Target.close', {
targetId: this._session.sessionId, targetId: this._session.sessionId,
runBeforeUnload runBeforeUnload
@ -728,28 +727,22 @@ export class WKPage implements PageDelegate {
await this._session.send('Page.setDefaultBackgroundColorOverride', { color }); await this._session.send('Page.setDefaultBackgroundColorOverride', { color });
} }
async startScreencast(options: types.PageScreencastOptions): Promise<void> { async _startScreencast(options: types.PageScreencastOptions): Promise<void> {
if (this._recordingVideoFile) assert(!this._recordingVideoFile);
throw new Error('Already recording'); const { screencastId } = await this._pageProxySession.send('Screencast.start', {
file: options.outputFile,
width: options.width,
height: options.height,
});
this._recordingVideoFile = options.outputFile; this._recordingVideoFile = options.outputFile;
try { this._browserContext._browser._videoStarted(this._browserContext, screencastId, options.outputFile, this.pageOrError());
const {screencastId} = await this._pageProxySession.send('Screencast.start', {
file: options.outputFile,
width: options.width,
height: options.height,
}) as any;
this._browserContext._browser._videoStarted(this._browserContext, screencastId, options.outputFile, this.pageOrError());
} catch (e) {
this._recordingVideoFile = null;
throw e;
}
} }
async stopScreencast(): Promise<void> { async _stopScreencast(): Promise<void> {
if (!this._recordingVideoFile) if (!this._recordingVideoFile)
throw new Error('No video recording in progress'); return;
await this._pageProxySession.sendMayFail('Screencast.stop');
this._recordingVideoFile = null; this._recordingVideoFile = null;
await this._pageProxySession.send('Screencast.stop');
} }
async takeScreenshot(format: string, documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer> { async takeScreenshot(format: string, documentRect: types.Rect | undefined, viewportRect: types.Rect | undefined, quality: number | undefined): Promise<Buffer> {