feat(screencast): auto recording for new pages in chromium (#3701)

This commit is contained in:
Yury Semikhatsky 2020-08-31 15:21:02 -07:00 committed by GitHub
parent f23dbfb009
commit a17dd98cf9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 76 additions and 27 deletions

View file

@ -53,7 +53,6 @@ export abstract class BrowserContext extends EventEmitter {
Close: 'close',
Page: 'page',
ScreencastStarted: 'screencaststarted',
ScreencastStopped: 'screencaststopped',
};
readonly _timeoutSettings = new TimeoutSettings();

View file

@ -27,6 +27,7 @@ import { Protocol } from './protocol';
import { toConsoleMessageLocation, exceptionToError, releaseObject } from './crProtocolHelper';
import * as dialog from '../dialog';
import { PageDelegate } from '../page';
import * as path from 'path';
import { RawMouseImpl, RawKeyboardImpl } from './crInput';
import { getAccessibilityTree } from './crAccessibility';
import { CRCoverage } from './crCoverage';
@ -36,7 +37,7 @@ import * as types from '../types';
import { ConsoleMessage } from '../console';
import * as sourceMap from '../../utils/sourceMap';
import { rewriteErrorMessage } from '../../utils/stackTrace';
import { assert, headersArrayToObject } from '../../utils/utils';
import { assert, headersArrayToObject, createGuid } from '../../utils/utils';
import { VideoRecorder } from './videoRecorder';
@ -112,6 +113,7 @@ export class CRPage implements PageDelegate {
for (const session of this._sessions.values())
session.dispose();
this._page._didClose();
this._mainFrameSession._stopScreencast().catch(() => {});
}
async navigateFrame(frame: frames.Frame, url: string, referrer: string | undefined): Promise<frames.GotoResult> {
@ -210,7 +212,7 @@ export class CRPage implements PageDelegate {
}
async startScreencast(options: types.PageScreencastOptions): Promise<void> {
await this._mainFrameSession._startScreencast(options);
await this._mainFrameSession._startScreencast(createGuid(), options);
}
async stopScreencast(): Promise<void> {
@ -326,6 +328,8 @@ class FrameSession {
// See Target.detachedFromTarget handler for details.
private _swappedIn = false;
private _videoRecorder: VideoRecorder | null = null;
private _screencastId: string | null = null;
private _screencastState: 'stopped' | 'starting' | 'started' = 'stopped';
constructor(crPage: CRPage, client: CRSession, targetId: string, parentSession: FrameSession | null) {
this._client = client;
@ -454,6 +458,13 @@ class FrameSession {
promises.push(this._evaluateOnNewDocument(source));
for (const source of this._crPage._page._evaluateOnNewDocumentSources)
promises.push(this._evaluateOnNewDocument(source));
if (this._crPage._browserContext._screencastOptions) {
const contextOptions = this._crPage._browserContext._screencastOptions;
const screencastId = createGuid();
const outputFile = path.join(contextOptions.dir, screencastId + '.webm');
const options = Object.assign({}, contextOptions, {outputFile});
promises.push(this._startScreencast(screencastId, options));
}
promises.push(this._client.send('Runtime.runIfWaitingForDebugger'));
promises.push(this._firstNonInitialNavigationCommittedPromise);
await Promise.all(promises);
@ -735,24 +746,42 @@ class FrameSession {
this._client.send('Page.screencastFrameAck', {sessionId: payload.sessionId});
}
async _startScreencast(options: types.PageScreencastOptions): Promise<void> {
assert(!this._videoRecorder, 'Already started');
this._videoRecorder = await VideoRecorder.launch(options);
await this._client.send('Page.startScreencast', {
format: 'jpeg',
quality: 90,
maxWidth: options.width,
maxHeight: options.height,
});
async _startScreencast(screencastId: string, options: types.PageScreencastOptions): Promise<void> {
if (this._screencastState !== 'stopped')
throw new Error('Already started');
const videoRecorder = await VideoRecorder.launch(options);
this._screencastState = 'starting';
try {
await this._client.send('Page.startScreencast', {
format: 'jpeg',
quality: 90,
maxWidth: options.width,
maxHeight: options.height,
});
this._screencastState = 'started';
this._videoRecorder = videoRecorder;
this._screencastId = screencastId;
this._crPage._browserContext._browser._screencastStarted(screencastId, options.outputFile, this._page);
} catch (e) {
videoRecorder.stop().catch(() => {});
throw e;
}
}
async _stopScreencast(): Promise<void> {
if (!this._videoRecorder)
return;
const recorder = this._videoRecorder;
this._videoRecorder = null;
await this._client.send('Page.stopScreencast');
await recorder.stop();
if (this._screencastState !== 'started')
throw new Error('No screencast in progress, current state: ' + this._screencastState);
try {
await this._client.send('Page.stopScreencast');
} finally {
const recorder = this._videoRecorder!;
const screencastId = this._screencastId!;
this._videoRecorder = null;
this._screencastId = null;
this._screencastState = 'stopped';
await recorder.stop().catch(() => {});
this._crPage._browserContext._browser._screencastFinished(screencastId);
}
}
async _updateExtraHTTPHeaders(): Promise<void> {

View file

@ -40,7 +40,6 @@ if (browserName === 'webkit')
// Screencast APIs that are not publicly available.
api.delete('browserContext.emit("screencaststarted")');
api.delete('browserContext.emit("screencaststopped")');
const coverageDir = path.join(__dirname, 'coverage-report');

View file

@ -194,6 +194,7 @@ describe('screencast', suite => {
it('should capture navigation', test => {
test.flaky(options.CHROMIUM && MAC);
test.flaky(options.FIREFOX && LINUX && !options.HEADLESS);
test.flaky(options.FIREFOX && WIN);
test.flaky(options.WEBKIT);
}, async ({page, tmpDir, server, videoPlayer, toImpl}) => {
const videoFile = path.join(tmpDir, 'v.webm');
@ -225,24 +226,26 @@ describe('screencast', suite => {
expectAll(pixels, almostGrey);
}
});
});
describe('screencast', suite => {
suite.slow();
suite.skip(options.WIRE || options.CHROMIUM);
}, () => {
it('should capture css transformation', test => {
test.fixme(options.WEBKIT && WIN, 'Accelerated compositing is disabled in WebKit on Windows.');
test.flaky(options.WEBKIT && LINUX);
}, async ({page, tmpDir, server, videoPlayer, toImpl}) => {
const videoFile = path.join(tmpDir, 'v.webm');
// Chromium automatically fits all frames to fit specified size. To avoid
// unwanted transformations we set view port size equal to the screencast
// size.
// TODO: support explicit 'scale' parameter in CDP.
if (options.CHROMIUM)
await page.setViewportSize({width: 640, height: 480});
await page.goto(server.PREFIX + '/rotate-z.html');
await toImpl(page)._delegate.startScreencast({outputFile: videoFile, width: 640, height: 480});
// TODO: in WebKit figure out why video size is not reported correctly for
// static pictures.
if (options.HEADLESS && options.WEBKIT)
await page.setViewportSize({width: 1270, height: 950});
await new Promise(r => setTimeout(r, 300));
// 300 is not enough for Chromium headful.
await new Promise(r => setTimeout(r, 500));
await toImpl(page)._delegate.stopScreencast();
expect(fs.existsSync(videoFile)).toBe(true);
@ -257,12 +260,12 @@ describe('screencast', suite => {
}
});
it('should fire start/stop events when page created/closed', test => {
it('should automatically start/finish when new page is created/closed', test => {
test.flaky(options.FIREFOX, 'Even slow is not slow enough');
}, async ({browser, tmpDir, toImpl}) => {
// Use server side of the context. All the code below also uses server side APIs.
const context = toImpl(await browser.newContext());
await context._enableScreencast({width: 640, height: 480, dir: tmpDir});
await context._enableScreencast({width: 320, height: 240, dir: tmpDir});
expect(context._screencastOptions).toBeTruthy();
const [screencast, newPage] = await Promise.all([
@ -279,6 +282,25 @@ describe('screencast', suite => {
await context.close();
});
it('should finish when context closes', async ({browser, tmpDir, toImpl}) => {
// Use server side of the context. All the code below also uses server side APIs.
const context = toImpl(await browser.newContext());
await context._enableScreencast({width: 320, height: 240, dir: tmpDir});
expect(context._screencastOptions).toBeTruthy();
const [screencast, newPage] = await Promise.all([
new Promise(resolve => context.on('screencaststarted', resolve)) as Promise<any>,
context.newPage(),
]);
expect(screencast.page === newPage).toBe(true);
const [videoFile] = await Promise.all([
screencast.path(),
context.close(),
]);
expect(path.dirname(videoFile)).toBe(tmpDir);
});
it('should fire start event for popups', async ({browser, tmpDir, server, toImpl}) => {
// Use server side of the context. All the code below also uses server side APIs.
const context = toImpl(await browser.newContext());