browser(firefox): support screencast frame size and scale configuration (#2847)

Yury Semikhatsky 2020-07-06 11:28:35 -07:00 committed by GitHub
parent ac2185a9d9
commit fc18f2f38a
5 changed files with 56 additions and 18 deletions

View file

@@ -1,2 +1,2 @@
1122
Changed: yurys@chromium.org Thu Jul 2 14:18:12 PDT 2020
1123
Changed: yurys@chromium.org Mon Jul 6 11:13:23 PDT 2020

View file

@@ -287,9 +287,14 @@ class PageHandler {
}
startVideoRecording({file, width, height, scale}) {
if (width < 10 || width > 10000 || height < 10 || height > 10000)
throw new Error("Invalid size");
if (scale && (scale <= 0 || scale > 1))
throw new Error("Unsupported scale");
const screencast = Cc['@mozilla.org/juggler/screencast;1'].getService(Ci.nsIScreencastService);
const docShell = this._pageTarget._gBrowser.ownerGlobal.docShell;
this._videoSessionId = screencast.startVideoRecording(docShell, file);
this._videoSessionId = screencast.startVideoRecording(docShell, file, width, height, scale || 0);
}
stopVideoRecording() {

View file

@@ -31,6 +31,7 @@
#include "ScreencastEncoder.h"
#include <algorithm>
#include <libyuv.h>
#include <vpx/vp8.h>
#include <vpx/vp8cx.h>
@@ -47,7 +48,8 @@ const int kMacroBlockSize = 16;
void createImage(unsigned int width, unsigned int height,
std::unique_ptr<vpx_image_t>& out_image,
std::unique_ptr<uint8_t[]>& out_image_buffer) {
std::unique_ptr<uint8_t[]>& out_image_buffer,
int& out_buffer_size) {
std::unique_ptr<vpx_image_t> image(new vpx_image_t());
memset(image.get(), 0, sizeof(vpx_image_t));
@@ -78,11 +80,11 @@ void createImage(unsigned int width, unsigned int height,
const int uv_rows = y_rows >> image->y_chroma_shift;
// Allocate a YUV buffer large enough for the aligned data & padding.
const int buffer_size = y_stride * y_rows + 2*uv_stride * uv_rows;
std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[buffer_size]);
out_buffer_size = y_stride * y_rows + 2*uv_stride * uv_rows;
std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[out_buffer_size]);
// Reset image value to 128 so we just need to fill in the y plane.
memset(image_buffer.get(), 128, buffer_size);
memset(image_buffer.get(), 128, out_buffer_size);
// Fill in the information for |image_|.
unsigned char* uchar_buffer =
@@ -180,13 +182,39 @@ public:
uint8_t* u_data = image->planes[1];
uint8_t* v_data = image->planes[2];
libyuv::I420Copy(src->DataY(), src->StrideY(),
src->DataU(), src->StrideU(),
src->DataV(), src->StrideV(),
y_data, y_stride,
u_data, uv_stride,
v_data, uv_stride,
image->w, image->h);
if (m_scale) {
int src_width = src->width();
double dst_width = src_width * m_scale.value();
if (dst_width > image->w) {
src_width *= image->w / dst_width;
dst_width = image->w;
}
int src_height = src->height();
double dst_height = src_height * m_scale.value();
if (dst_height > image->h) {
src_height *= image->h / dst_height;
dst_height = image->h;
}
libyuv::I420Scale(src->DataY(), src->StrideY(),
src->DataU(), src->StrideU(),
src->DataV(), src->StrideV(),
src_width, src_height,
y_data, y_stride,
u_data, uv_stride,
v_data, uv_stride,
dst_width, dst_height,
libyuv::kFilterBilinear);
} else {
int width = std::min<int>(image->w, src->width());
int height = std::min<int>(image->h, src->height());
libyuv::I420Copy(src->DataY(), src->StrideY(),
src->DataU(), src->StrideU(),
src->DataV(), src->StrideV(),
y_data, y_stride,
u_data, uv_stride,
v_data, uv_stride,
width, height);
}
}
private:
@@ -212,7 +240,7 @@ public:
ivf_write_file_header(m_file, &m_cfg, m_fourcc, 0);
createImage(cfg.g_w, cfg.g_h, m_image, m_imageBuffer);
createImage(cfg.g_w, cfg.g_h, m_image, m_imageBuffer, m_imageBufferSize);
}
~VPXCodec() {
@@ -223,6 +251,7 @@ public:
void encodeFrameAsync(std::unique_ptr<VPXFrame>&& frame)
{
m_encoderQueue->Dispatch(NS_NewRunnableFunction("VPXCodec::encodeFrameAsync", [this, frame = std::move(frame)] {
memset(m_imageBuffer.get(), 128, m_imageBufferSize);
frame->convertToVpxImage(m_image.get());
// TODO: figure out why passing duration to the codec results in much
// worse visual quality and makes video stutter.
@@ -292,6 +321,7 @@ private:
int m_frameCount { 0 };
int64_t m_pts { 0 };
std::unique_ptr<uint8_t[]> m_imageBuffer;
int m_imageBufferSize { 0 };
std::unique_ptr<vpx_image_t> m_image;
};

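The new convertToVpxImage path above no longer copies the captured frame 1:1. When a scale is set it downscales the frame with libyuv::I420Scale, and whenever the scaled size would still overflow the preallocated VPX image it crops the source so the output fits. A minimal standalone sketch of just that geometry computation (names are illustrative, not from the patch):

#include <algorithm>

// Sketch of the scale/crop arithmetic used in convertToVpxImage: given a
// captured frame (srcW x srcH), a target vpx_image_t of (imgW x imgH) and a
// scale factor (0 meaning "not specified"), compute which part of the source
// is consumed and how large it ends up inside the target image.
struct ScaleGeometry {
  int srcW, srcH;  // portion of the source frame that is used
  int dstW, dstH;  // size it occupies inside the target image
};

static ScaleGeometry ComputeGeometry(int srcW, int srcH, int imgW, int imgH, double scale) {
  if (scale <= 0) {
    // No scaling requested: plain copy, clipped to the target (the I420Copy branch).
    int w = std::min(imgW, srcW);
    int h = std::min(imgH, srcH);
    return {w, h, w, h};
  }
  double dstW = srcW * scale;
  if (dstW > imgW) {
    // The scaled width would not fit: crop the source so the output fills imgW.
    srcW = static_cast<int>(srcW * (imgW / dstW));
    dstW = imgW;
  }
  double dstH = srcH * scale;
  if (dstH > imgH) {
    srcH = static_cast<int>(srcH * (imgH / dstH));
    dstH = imgH;
  }
  return {srcW, srcH, static_cast<int>(dstW), static_cast<int>(dstH)};
}

For example, a 1280x720 capture with scale 0.8 going into an 800x450 target would want 1024x576, so the source is cropped to about 1000x562 and scaled to exactly 800x450. The per-frame memset of the image buffer to 128 added in encodeFrameAsync keeps any part of the target that the (possibly smaller) scaled frame does not cover at neutral gray instead of retaining pixels from the previous frame.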
View file

@@ -12,6 +12,6 @@ interface nsIDocShell;
[scriptable, uuid(d8c4d9e0-9462-445e-9e43-68d3872ad1de)]
interface nsIScreencastService : nsISupports
{
long startVideoRecording(in nsIDocShell docShell, in ACString fileName);
long startVideoRecording(in nsIDocShell docShell, in ACString fileName, in uint32_t width, in uint32_t height, in double scale);
void stopVideoRecording(in long sessionId);
};
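
Because scale crosses the interface as a plain double, the JS handler above sends scale || 0 and the native side treats 0 as "not specified". A small sketch of that conversion, assuming Gecko's mozilla::Maybe (the service code in the next file does the same thing inline):

#include "mozilla/Maybe.h"

// Sketch, not patch code: turn the 0 sentinel coming across the XPCOM
// boundary back into an empty Maybe, so downstream code can distinguish
// "no scaling requested" from a real scale factor.
static mozilla::Maybe<double> ToMaybeScale(double scale) {
  mozilla::Maybe<double> maybeScale;
  if (scale)
    maybeScale = mozilla::Some(scale);
  return maybeScale;
}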

View file

@@ -93,7 +93,7 @@ nsScreencastService::nsScreencastService() = default;
nsScreencastService::~nsScreencastService() {
}
nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, const nsACString& aFileName, int32_t* sessionId) {
nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, const nsACString& aFileName, uint32_t width, uint32_t height, double scale, int32_t* sessionId) {
MOZ_RELEASE_ASSERT(NS_IsMainThread(), "Screencast service must be started on the Main thread.");
*sessionId = -1;
@@ -121,7 +121,10 @@ nsresult nsScreencastService::StartVideoRecording(nsIDocShell* aDocShell, const
# endif
*sessionId = ++mLastSessionId;
nsCString error;
RefPtr<ScreencastEncoder> encoder = ScreencastEncoder::create(error, PromiseFlatCString(aFileName), 1280, 960, Nothing());
Maybe<double> maybeScale;
if (scale)
maybeScale = Some(scale);
RefPtr<ScreencastEncoder> encoder = ScreencastEncoder::create(error, PromiseFlatCString(aFileName), width, height, maybeScale);
if (!encoder) {
fprintf(stderr, "Failed to create ScreencastEncoder: %s\n", error.get());
return NS_ERROR_FAILURE;