feat(screencast): auto recording for new pages in chromium (#3701)

This commit is contained in:
Yury Semikhatsky 2020-08-31 15:21:02 -07:00 committed by GitHub
parent f23dbfb009
commit a17dd98cf9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 76 additions and 27 deletions

View file

@ -53,7 +53,6 @@ export abstract class BrowserContext extends EventEmitter {
Close: 'close', Close: 'close',
Page: 'page', Page: 'page',
ScreencastStarted: 'screencaststarted', ScreencastStarted: 'screencaststarted',
ScreencastStopped: 'screencaststopped',
}; };
readonly _timeoutSettings = new TimeoutSettings(); readonly _timeoutSettings = new TimeoutSettings();

View file

@ -27,6 +27,7 @@ import { Protocol } from './protocol';
import { toConsoleMessageLocation, exceptionToError, releaseObject } from './crProtocolHelper'; import { toConsoleMessageLocation, exceptionToError, releaseObject } from './crProtocolHelper';
import * as dialog from '../dialog'; import * as dialog from '../dialog';
import { PageDelegate } from '../page'; import { PageDelegate } from '../page';
import * as path from 'path';
import { RawMouseImpl, RawKeyboardImpl } from './crInput'; import { RawMouseImpl, RawKeyboardImpl } from './crInput';
import { getAccessibilityTree } from './crAccessibility'; import { getAccessibilityTree } from './crAccessibility';
import { CRCoverage } from './crCoverage'; import { CRCoverage } from './crCoverage';
@ -36,7 +37,7 @@ import * as types from '../types';
import { ConsoleMessage } from '../console'; import { ConsoleMessage } from '../console';
import * as sourceMap from '../../utils/sourceMap'; import * as sourceMap from '../../utils/sourceMap';
import { rewriteErrorMessage } from '../../utils/stackTrace'; import { rewriteErrorMessage } from '../../utils/stackTrace';
import { assert, headersArrayToObject } from '../../utils/utils'; import { assert, headersArrayToObject, createGuid } from '../../utils/utils';
import { VideoRecorder } from './videoRecorder'; import { VideoRecorder } from './videoRecorder';
@ -112,6 +113,7 @@ export class CRPage implements PageDelegate {
for (const session of this._sessions.values()) for (const session of this._sessions.values())
session.dispose(); session.dispose();
this._page._didClose(); this._page._didClose();
this._mainFrameSession._stopScreencast().catch(() => {});
} }
async navigateFrame(frame: frames.Frame, url: string, referrer: string | undefined): Promise<frames.GotoResult> { async navigateFrame(frame: frames.Frame, url: string, referrer: string | undefined): Promise<frames.GotoResult> {
@ -210,7 +212,7 @@ export class CRPage implements PageDelegate {
} }
async startScreencast(options: types.PageScreencastOptions): Promise<void> { async startScreencast(options: types.PageScreencastOptions): Promise<void> {
await this._mainFrameSession._startScreencast(options); await this._mainFrameSession._startScreencast(createGuid(), options);
} }
async stopScreencast(): Promise<void> { async stopScreencast(): Promise<void> {
@ -326,6 +328,8 @@ class FrameSession {
// See Target.detachedFromTarget handler for details. // See Target.detachedFromTarget handler for details.
private _swappedIn = false; private _swappedIn = false;
private _videoRecorder: VideoRecorder | null = null; private _videoRecorder: VideoRecorder | null = null;
private _screencastId: string | null = null;
private _screencastState: 'stopped' | 'starting' | 'started' = 'stopped';
constructor(crPage: CRPage, client: CRSession, targetId: string, parentSession: FrameSession | null) { constructor(crPage: CRPage, client: CRSession, targetId: string, parentSession: FrameSession | null) {
this._client = client; this._client = client;
@ -454,6 +458,13 @@ class FrameSession {
promises.push(this._evaluateOnNewDocument(source)); promises.push(this._evaluateOnNewDocument(source));
for (const source of this._crPage._page._evaluateOnNewDocumentSources) for (const source of this._crPage._page._evaluateOnNewDocumentSources)
promises.push(this._evaluateOnNewDocument(source)); promises.push(this._evaluateOnNewDocument(source));
if (this._crPage._browserContext._screencastOptions) {
const contextOptions = this._crPage._browserContext._screencastOptions;
const screencastId = createGuid();
const outputFile = path.join(contextOptions.dir, screencastId + '.webm');
const options = Object.assign({}, contextOptions, {outputFile});
promises.push(this._startScreencast(screencastId, options));
}
promises.push(this._client.send('Runtime.runIfWaitingForDebugger')); promises.push(this._client.send('Runtime.runIfWaitingForDebugger'));
promises.push(this._firstNonInitialNavigationCommittedPromise); promises.push(this._firstNonInitialNavigationCommittedPromise);
await Promise.all(promises); await Promise.all(promises);
@ -735,24 +746,42 @@ class FrameSession {
this._client.send('Page.screencastFrameAck', {sessionId: payload.sessionId}); this._client.send('Page.screencastFrameAck', {sessionId: payload.sessionId});
} }
async _startScreencast(options: types.PageScreencastOptions): Promise<void> { async _startScreencast(screencastId: string, options: types.PageScreencastOptions): Promise<void> {
assert(!this._videoRecorder, 'Already started'); if (this._screencastState !== 'stopped')
this._videoRecorder = await VideoRecorder.launch(options); throw new Error('Already started');
await this._client.send('Page.startScreencast', { const videoRecorder = await VideoRecorder.launch(options);
format: 'jpeg', this._screencastState = 'starting';
quality: 90, try {
maxWidth: options.width, await this._client.send('Page.startScreencast', {
maxHeight: options.height, format: 'jpeg',
}); quality: 90,
maxWidth: options.width,
maxHeight: options.height,
});
this._screencastState = 'started';
this._videoRecorder = videoRecorder;
this._screencastId = screencastId;
this._crPage._browserContext._browser._screencastStarted(screencastId, options.outputFile, this._page);
} catch (e) {
videoRecorder.stop().catch(() => {});
throw e;
}
} }
async _stopScreencast(): Promise<void> { async _stopScreencast(): Promise<void> {
if (!this._videoRecorder) if (this._screencastState !== 'started')
return; throw new Error('No screencast in progress, current state: ' + this._screencastState);
const recorder = this._videoRecorder; try {
this._videoRecorder = null; await this._client.send('Page.stopScreencast');
await this._client.send('Page.stopScreencast'); } finally {
await recorder.stop(); const recorder = this._videoRecorder!;
const screencastId = this._screencastId!;
this._videoRecorder = null;
this._screencastId = null;
this._screencastState = 'stopped';
await recorder.stop().catch(() => {});
this._crPage._browserContext._browser._screencastFinished(screencastId);
}
} }
async _updateExtraHTTPHeaders(): Promise<void> { async _updateExtraHTTPHeaders(): Promise<void> {

View file

@ -40,7 +40,6 @@ if (browserName === 'webkit')
// Screencast APIs that are not publicly available. // Screencast APIs that are not publicly available.
api.delete('browserContext.emit("screencaststarted")'); api.delete('browserContext.emit("screencaststarted")');
api.delete('browserContext.emit("screencaststopped")');
const coverageDir = path.join(__dirname, 'coverage-report'); const coverageDir = path.join(__dirname, 'coverage-report');

View file

@ -194,6 +194,7 @@ describe('screencast', suite => {
it('should capture navigation', test => { it('should capture navigation', test => {
test.flaky(options.CHROMIUM && MAC); test.flaky(options.CHROMIUM && MAC);
test.flaky(options.FIREFOX && LINUX && !options.HEADLESS); test.flaky(options.FIREFOX && LINUX && !options.HEADLESS);
test.flaky(options.FIREFOX && WIN);
test.flaky(options.WEBKIT); test.flaky(options.WEBKIT);
}, async ({page, tmpDir, server, videoPlayer, toImpl}) => { }, async ({page, tmpDir, server, videoPlayer, toImpl}) => {
const videoFile = path.join(tmpDir, 'v.webm'); const videoFile = path.join(tmpDir, 'v.webm');
@ -225,24 +226,26 @@ describe('screencast', suite => {
expectAll(pixels, almostGrey); expectAll(pixels, almostGrey);
} }
}); });
});
describe('screencast', suite => {
suite.slow();
suite.skip(options.WIRE || options.CHROMIUM);
}, () => {
it('should capture css transformation', test => { it('should capture css transformation', test => {
test.fixme(options.WEBKIT && WIN, 'Accelerated compositing is disabled in WebKit on Windows.'); test.fixme(options.WEBKIT && WIN, 'Accelerated compositing is disabled in WebKit on Windows.');
test.flaky(options.WEBKIT && LINUX); test.flaky(options.WEBKIT && LINUX);
}, async ({page, tmpDir, server, videoPlayer, toImpl}) => { }, async ({page, tmpDir, server, videoPlayer, toImpl}) => {
const videoFile = path.join(tmpDir, 'v.webm'); const videoFile = path.join(tmpDir, 'v.webm');
// Chromium automatically fits all frames to fit specified size. To avoid
// unwanted transformations we set view port size equal to the screencast
// size.
// TODO: support explicit 'scale' parameter in CDP.
if (options.CHROMIUM)
await page.setViewportSize({width: 640, height: 480});
await page.goto(server.PREFIX + '/rotate-z.html'); await page.goto(server.PREFIX + '/rotate-z.html');
await toImpl(page)._delegate.startScreencast({outputFile: videoFile, width: 640, height: 480}); await toImpl(page)._delegate.startScreencast({outputFile: videoFile, width: 640, height: 480});
// TODO: in WebKit figure out why video size is not reported correctly for // TODO: in WebKit figure out why video size is not reported correctly for
// static pictures. // static pictures.
if (options.HEADLESS && options.WEBKIT) if (options.HEADLESS && options.WEBKIT)
await page.setViewportSize({width: 1270, height: 950}); await page.setViewportSize({width: 1270, height: 950});
await new Promise(r => setTimeout(r, 300)); // 300 is not enough for Chromium headful.
await new Promise(r => setTimeout(r, 500));
await toImpl(page)._delegate.stopScreencast(); await toImpl(page)._delegate.stopScreencast();
expect(fs.existsSync(videoFile)).toBe(true); expect(fs.existsSync(videoFile)).toBe(true);
@ -257,12 +260,12 @@ describe('screencast', suite => {
} }
}); });
it('should fire start/stop events when page created/closed', test => { it('should automatically start/finish when new page is created/closed', test => {
test.flaky(options.FIREFOX, 'Even slow is not slow enough'); test.flaky(options.FIREFOX, 'Even slow is not slow enough');
}, async ({browser, tmpDir, toImpl}) => { }, async ({browser, tmpDir, toImpl}) => {
// Use server side of the context. All the code below also uses server side APIs. // Use server side of the context. All the code below also uses server side APIs.
const context = toImpl(await browser.newContext()); const context = toImpl(await browser.newContext());
await context._enableScreencast({width: 640, height: 480, dir: tmpDir}); await context._enableScreencast({width: 320, height: 240, dir: tmpDir});
expect(context._screencastOptions).toBeTruthy(); expect(context._screencastOptions).toBeTruthy();
const [screencast, newPage] = await Promise.all([ const [screencast, newPage] = await Promise.all([
@ -279,6 +282,25 @@ describe('screencast', suite => {
await context.close(); await context.close();
}); });
it('should finish when context closes', async ({browser, tmpDir, toImpl}) => {
// Use server side of the context. All the code below also uses server side APIs.
const context = toImpl(await browser.newContext());
await context._enableScreencast({width: 320, height: 240, dir: tmpDir});
expect(context._screencastOptions).toBeTruthy();
const [screencast, newPage] = await Promise.all([
new Promise(resolve => context.on('screencaststarted', resolve)) as Promise<any>,
context.newPage(),
]);
expect(screencast.page === newPage).toBe(true);
const [videoFile] = await Promise.all([
screencast.path(),
context.close(),
]);
expect(path.dirname(videoFile)).toBe(tmpDir);
});
it('should fire start event for popups', async ({browser, tmpDir, server, toImpl}) => { it('should fire start event for popups', async ({browser, tmpDir, server, toImpl}) => {
// Use server side of the context. All the code below also uses server side APIs. // Use server side of the context. All the code below also uses server side APIs.
const context = toImpl(await browser.newContext()); const context = toImpl(await browser.newContext());