Bug 1816559 - Remote compositor recording from GPU process r=mstange

Differential Revision: https://phabricator.services.mozilla.com/D170514
This commit is contained in:
Chris Martin 2023-03-14 13:31:37 +00:00
Родитель b23a9763fb
Коммит b42c4425d0
16 изменённых файлов: 284 добавлений и 372 удалений

Просмотреть файл

@ -19,6 +19,7 @@
#include "nsFrameManager.h"
#include "nsRefreshDriver.h"
#include "nsStyleUtil.h"
#include "mozilla/Base64.h"
#include "mozilla/dom/Animation.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "mozilla/dom/BlobBinding.h"
@ -117,6 +118,7 @@
#include "mozilla/dom/Promise.h"
#include "mozilla/ServoBindings.h"
#include "mozilla/StyleSheetInlines.h"
#include "mozilla/gfx/gfxVars.h"
#include "mozilla/gfx/GPUProcessManager.h"
#include "mozilla/dom/TimeoutManager.h"
#include "mozilla/PreloadedStyleSheet.h"
@ -131,6 +133,12 @@
#include "mozilla/IMEStateManager.h"
#include "mozilla/IMEContentObserver.h"
#ifdef XP_WIN
# include <direct.h>
#else
# include <sys/stat.h>
#endif
#ifdef XP_WIN
# undef GetClassName
#endif
@ -4557,6 +4565,110 @@ nsDOMWindowUtils::StartCompositionRecording(Promise** aOutPromise) {
return NS_OK;
}
// Write every frame of aRecording to disk as an individual PNG file.
//
// The directory name embeds the unix timestamp of when the recording
// started, so that a consumer of these files can compute an absolute
// timestamp for each screenshot and align it with timed data from other
// sources (e.g. Gecko profiler output). Each frame's filename carries its
// offset from the recording start, in milliseconds.
//
// Returns false if the frame index overruns the shared byte buffer, if any
// file cannot be created or fully written, or if bytes are left over after
// the last frame.
static bool WriteRecordingToDisk(const FrameRecording& aRecording,
                                 double aUnixStartMS) {
  std::stringstream dirPath;
  dirPath << gfxVars::LayersWindowRecordingPath() << "windowrecording-"
          << int64_t(aUnixStartMS);

// Best-effort directory creation; a failure surfaces below when fopen fails.
#ifdef XP_WIN
  _mkdir(dirPath.str().c_str());
#else
  mkdir(dirPath.str().c_str(), 0777);
#endif

  auto remainingBytes = aRecording.bytes().AsSpan();

  uint32_t frameIndex = 1;
  for (const auto& frame : aRecording.frames()) {
    const uint32_t frameLen = frame.length();
    if (frameLen > remainingBytes.Length()) {
      // The frame metadata claims more data than the buffer holds.
      return false;
    }
    const auto pngData = remainingBytes.To(frameLen);
    remainingBytes = remainingBytes.From(frameLen);

    const double offsetMS =
        (frame.timeOffset() - aRecording.startTime()).ToMilliseconds();

    std::stringstream framePath;
    framePath << dirPath.str() << "/frame-" << frameIndex << "-"
              << uint32_t(offsetMS) << ".png";

    FILE* fp = fopen(framePath.str().c_str(), "wb");
    if (!fp) {
      return false;
    }
    const size_t written =
        fwrite(pngData.Elements(), sizeof(uint8_t), pngData.Length(), fp);
    fclose(fp);
    if (written < pngData.Length()) {
      return false;
    }

    ++frameIndex;
  }

  // Every byte of the buffer must belong to exactly one frame.
  return remainingBytes.Length() == 0;
}
// Convert an IPC FrameRecording into the WebIDL DOMCollectedFrames shape.
//
// Each frame's PNG bytes are sliced out of the shared buffer and encoded as
// a base64 "data:image/png" URI; the frame's time offset is expressed in
// milliseconds relative to the recording start. Returns Nothing() if a frame
// overruns the buffer, if base64 encoding fails, or if trailing bytes remain
// after the last frame.
static Maybe<DOMCollectedFrames> ConvertCompositionRecordingFramesToDom(
    const FrameRecording& aRecording, double aUnixStartMS) {
  auto remainingBytes = aRecording.bytes().AsSpan();

  nsTArray<DOMCollectedFrame> frames;
  for (const auto& frame : aRecording.frames()) {
    const uint32_t frameLen = frame.length();
    if (frameLen > remainingBytes.Length()) {
      // Frame metadata is inconsistent with the buffer size.
      return Nothing();
    }
    const auto pngData = remainingBytes.To(frameLen);
    remainingBytes = remainingBytes.From(frameLen);

    nsCString uri;
    uri.AppendLiteral("data:image/png;base64,");
    const nsresult rv = Base64EncodeAppend(
        reinterpret_cast<const char*>(pngData.Elements()), pngData.Length(),
        uri);
    if (NS_FAILED(rv)) {
      return Nothing();
    }

    DOMCollectedFrame domFrame;
    domFrame.mTimeOffset =
        (frame.timeOffset() - aRecording.startTime()).ToMilliseconds();
    domFrame.mDataUri = std::move(uri);
    frames.AppendElement(std::move(domFrame));
  }

  if (remainingBytes.Length() != 0) {
    // Leftover bytes mean the frame list and buffer disagree.
    return Nothing();
  }

  DOMCollectedFrames result;
  result.mRecordingStart = aUnixStartMS;
  result.mFrames = std::move(frames);
  return Some(std::move(result));
}
NS_IMETHODIMP
nsDOMWindowUtils::StopCompositionRecording(bool aWriteToDisk,
Promise** aOutPromise) {
@ -4574,85 +4686,61 @@ nsDOMWindowUtils::StopCompositionRecording(bool aWriteToDisk,
return err.StealNSResult();
}
RefPtr<Promise>(promise).forget(aOutPromise);
CompositorBridgeChild* cbc = GetCompositorBridge();
if (NS_WARN_IF(!cbc)) {
promise->MaybeReject(NS_ERROR_UNEXPECTED);
} else if (aWriteToDisk) {
cbc->SendEndRecordingToDisk()->Then(
GetCurrentSerialEventTarget(), __func__,
[promise](const bool& aSuccess) {
if (aSuccess) {
promise->MaybeResolveWithUndefined();
} else {
promise->MaybeRejectWithInvalidStateError(
"The composition recorder is not running.");
}
},
[promise](const mozilla::ipc::ResponseRejectReason&) {
promise->MaybeRejectWithUnknownError(
"Could not stop the composition recorder.");
});
} else {
cbc->SendEndRecordingToMemory()->Then(
GetCurrentSerialEventTarget(), __func__,
[promise](Maybe<CollectedFramesParams>&& aFrames) {
if (!aFrames) {
promise->MaybeRejectWithUnknownError(
"Could not stop the composition recorder.");
return;
}
DOMCollectedFrames domFrames;
if (!domFrames.mFrames.SetCapacity(aFrames->frames().Length(),
fallible)) {
promise->MaybeReject(NS_ERROR_OUT_OF_MEMORY);
return;
}
domFrames.mRecordingStart = aFrames->recordingStart();
CheckedInt<size_t> totalSize(0);
for (const CollectedFrameParams& frame : aFrames->frames()) {
totalSize += frame.length();
}
if (totalSize.isValid() &&
totalSize.value() > aFrames->buffer().Size<char>()) {
promise->MaybeRejectWithUnknownError(
"Could not interpret returned frames.");
return;
}
Span<const char> buffer(aFrames->buffer().get<char>(),
aFrames->buffer().Size<char>());
for (const CollectedFrameParams& frame : aFrames->frames()) {
size_t length = frame.length();
Span<const char> dataUri = buffer.First(length);
// We have to do a fallible AppendElement() because WebIDL
// dictionaries use FallibleTArray. However, this cannot fail due to
// the pre-allocation earlier.
DOMCollectedFrame* domFrame =
domFrames.mFrames.AppendElement(fallible);
MOZ_ASSERT(domFrame);
domFrame->mTimeOffset = frame.timeOffset();
domFrame->mDataUri = nsCString(dataUri);
buffer = buffer.Subspan(length);
}
promise->MaybeResolve(domFrames);
},
[promise](const mozilla::ipc::ResponseRejectReason&) {
promise->MaybeRejectWithUnknownError(
"Could not stop the composition recorder.");
});
return NS_OK;
}
promise.forget(aOutPromise);
cbc->SendEndRecording()->Then(
GetCurrentSerialEventTarget(), __func__,
[promise, aWriteToDisk](Maybe<FrameRecording>&& aRecording) {
if (!aRecording) {
promise->MaybeRejectWithUnknownError("Failed to get frame recording");
return;
}
// We need to know when the recording started in Unix Time.
// Unfortunately, the recording start time is an opaque Timestamp that
// can only be used to calculate a duration.
//
// This is not great, but we are going to get Now() twice in close
// proximity, one in Unix Time and the other in Timestamp time. Then we
// can subtract the length of the recording from the current Unix Time
// to get the Unix start time.
const TimeStamp timestampNow = TimeStamp::Now();
const int64_t unixNowUS = PR_Now();
const TimeDuration recordingLength =
timestampNow - aRecording->startTime();
const double unixNowMS = double(unixNowUS) / 1000.0;
const double unixStartMS = unixNowMS - recordingLength.ToMilliseconds();
if (aWriteToDisk) {
if (!WriteRecordingToDisk(*aRecording, unixStartMS)) {
promise->MaybeRejectWithUnknownError(
"Failed to write recording to disk");
return;
}
promise->MaybeResolveWithUndefined();
} else {
auto maybeDomFrames =
ConvertCompositionRecordingFramesToDom(*aRecording, unixStartMS);
if (!maybeDomFrames) {
promise->MaybeRejectWithUnknownError(
"Unable to base64-encode recorded frames");
return;
}
promise->MaybeResolve(*maybeDomFrames);
}
},
[promise](const mozilla::ipc::ResponseRejectReason&) {
promise->MaybeRejectWithUnknownError(
"IPC failed getting composition recording");
});
return NS_OK;
}

Просмотреть файл

@ -32,76 +32,83 @@ CompositionRecorder::CompositionRecorder(TimeStamp aRecordingStart)
: mRecordingStart(aRecordingStart) {}
void CompositionRecorder::RecordFrame(RecordedFrame* aFrame) {
mCollectedFrames.AppendElement(aFrame);
mRecordedFrames.AppendElement(aFrame);
}
void CompositionRecorder::WriteCollectedFrames() {
// The directory has the format of
// "[LayersWindowRecordingPath]/windowrecording-[mRecordingStartTime as unix
// timestamp]". We need mRecordingStart as a unix timestamp here because we
// want the consumer of these files to be able to compute an absolute
// timestamp of each screenshot, so that we can align screenshots with timed
// data from other sources, such as Gecko profiler information. The time of
// each screenshot is part of the screenshot's filename, expressed as
// milliseconds *relative to mRecordingStart*. We want to compute the number
// of milliseconds between midnight 1 January 1970 UTC and mRecordingStart,
// unfortunately, mozilla::TimeStamp does not have a built-in way of doing
// that. However, PR_Now() returns the number of microseconds since midnight 1
// January 1970 UTC. We call PR_Now() and TimeStamp::Now() very
// closely to each other so that they return their representation of "the same
// time", and then compute (Now - (Now - mRecordingStart)).
std::stringstream str;
nsCString recordingStartTime;
TimeDuration delta = TimeStamp::Now() - mRecordingStart;
recordingStartTime.AppendFloat(
static_cast<double>(PR_Now() / 1000.0 - delta.ToMilliseconds()));
str << gfxVars::LayersWindowRecordingPath() << "windowrecording-"
<< recordingStartTime;
Maybe<FrameRecording> CompositionRecorder::GetRecording() {
FrameRecording recording;
recording.startTime() = mRecordingStart;
nsTArray<uint8_t> bytes;
for (RefPtr<RecordedFrame>& recordedFrame : mRecordedFrames) {
RefPtr<DataSourceSurface> surf = recordedFrame->GetSourceSurface();
if (!surf) {
return Nothing();
}
nsCOMPtr<nsIInputStream> imgStream;
nsresult rv = gfxUtils::EncodeSourceSurfaceAsStream(
surf, ImageType::PNG, u""_ns, getter_AddRefs(imgStream));
if (NS_FAILED(rv)) {
return Nothing();
}
uint64_t bufSize64;
rv = imgStream->Available(&bufSize64);
if (NS_FAILED(rv) || bufSize64 > UINT32_MAX) {
return Nothing();
}
const uint32_t frameLength = static_cast<uint32_t>(bufSize64);
size_t startIndex = bytes.Length();
bytes.SetLength(startIndex + frameLength);
uint8_t* bytePtr = &bytes[startIndex];
uint32_t bytesLeft = frameLength;
while (bytesLeft > 0) {
uint32_t bytesRead = 0;
rv = imgStream->Read(reinterpret_cast<char*>(bytePtr), bytesLeft,
&bytesRead);
if (NS_FAILED(rv) || bytesRead == 0) {
return Nothing();
}
bytePtr += bytesRead;
bytesLeft -= bytesRead;
}
#ifdef DEBUG
// Currently, all implementers of imgIEncoder report their exact size
// through nsIInputStream::Available(), but let's explicitly state that we
// rely on that behavior for the algorithm above.
char dummy = 0;
uint32_t bytesRead = 0;
rv = imgStream->Read(&dummy, 1, &bytesRead);
MOZ_ASSERT(NS_SUCCEEDED(rv) && bytesRead == 0);
#ifdef XP_WIN
_mkdir(str.str().c_str());
#else
mkdir(str.str().c_str(), 0777);
#endif
uint32_t i = 1;
for (RefPtr<RecordedFrame>& frame : mCollectedFrames) {
RefPtr<DataSourceSurface> surf = frame->GetSourceSurface();
std::stringstream filename;
filename << str.str() << "/frame-" << i << "-"
<< uint32_t(
(frame->GetTimeStamp() - mRecordingStart).ToMilliseconds())
<< ".png";
gfxUtils::WriteAsPNG(surf, filename.str().c_str());
i++;
}
mCollectedFrames.Clear();
}
RecordedFrameData frameData;
CollectedFrames CompositionRecorder::GetCollectedFrames() {
nsTArray<CollectedFrame> frames;
frameData.timeOffset() = recordedFrame->GetTimeStamp();
frameData.length() = frameLength;
TimeDuration delta = TimeStamp::Now() - mRecordingStart;
double recordingStart = PR_Now() / 1000.0 - delta.ToMilliseconds();
recording.frames().AppendElement(std::move(frameData));
for (RefPtr<RecordedFrame>& frame : mCollectedFrames) {
nsCString buffer;
RefPtr<DataSourceSurface> surf = frame->GetSourceSurface();
double offset = (frame->GetTimeStamp() - mRecordingStart).ToMilliseconds();
gfxUtils::EncodeSourceSurface(surf, ImageType::PNG, u""_ns,
gfxUtils::eDataURIEncode, nullptr, &buffer);
frames.EmplaceBack(offset, std::move(buffer));
// Now that we're done, release the frame so we can free up its memory
recordedFrame = nullptr;
}
mCollectedFrames.Clear();
mRecordedFrames.Clear();
return CollectedFrames(recordingStart, std::move(frames));
recording.bytes() = ipc::BigBuffer(bytes);
return Some(std::move(recording));
}
void CompositionRecorder::ClearCollectedFrames() { mCollectedFrames.Clear(); }
} // namespace layers
} // namespace mozilla

Просмотреть файл

@ -9,6 +9,7 @@
#include "mozilla/RefPtr.h"
#include "mozilla/TimeStamp.h"
#include "mozilla/layers/PCompositorBridgeTypes.h"
#include "nsISupportsImpl.h"
#include "nsTArray.h"
#include "nsString.h"
@ -41,28 +42,6 @@ class RecordedFrame {
TimeStamp mTimeStamp;
};
/**
* A recorded frame that has been encoded into a data: URI.
*/
struct CollectedFrame {
CollectedFrame(double aTimeOffset, nsCString&& aDataUri)
: mTimeOffset(aTimeOffset), mDataUri(std::move(aDataUri)) {}
double mTimeOffset;
nsCString mDataUri;
};
/**
* All of the frames collected during a composition recording session.
*/
struct CollectedFrames {
CollectedFrames(double aRecordingStart, nsTArray<CollectedFrame>&& aFrames)
: mRecordingStart(aRecordingStart), mFrames(std::move(aFrames)) {}
double mRecordingStart;
nsTArray<CollectedFrame> mFrames;
};
/**
* A recorder for composited frames.
*
@ -82,20 +61,14 @@ class CompositionRecorder {
void RecordFrame(RecordedFrame* aFrame);
/**
* Write out the collected frames as a series of timestamped images.
* Get the array of frames that were recorded since the last call to
* GetRecording(), where each frame consists of a presentation time and
* the PNG-encoded contents as an array of bytes
*/
void WriteCollectedFrames();
/**
* Return the collected frames as an array of their timestamps and contents.
*/
CollectedFrames GetCollectedFrames();
protected:
void ClearCollectedFrames();
Maybe<FrameRecording> GetRecording();
private:
nsTArray<RefPtr<RecordedFrame>> mCollectedFrames;
nsTArray<RefPtr<RecordedFrame>> mRecordedFrames;
TimeStamp mRecordingStart;
};

Просмотреть файл

@ -1871,42 +1871,22 @@ mozilla::ipc::IPCResult CompositorBridgeParent::RecvBeginRecording(
return IPC_OK();
}
// IPC handler: stop the composition recording and write the collected
// frames to disk, resolving the IPC promise with a success flag.
// Resolves false if no recording is in progress or there is no WebRender
// bridge to write through.
mozilla::ipc::IPCResult CompositorBridgeParent::RecvEndRecordingToDisk(
    EndRecordingToDiskResolver&& aResolve) {
  if (!mHaveCompositionRecorder) {
    // Nothing was being recorded; report failure to the caller.
    aResolve(false);
    return IPC_OK();
  }

  if (mWrBridge) {
    // Forward the async write result (or rejection) into the IPC resolver.
    mWrBridge->WriteCollectedFrames()->Then(
        NS_GetCurrentThread(), __func__,
        [resolve{aResolve}](const bool success) { resolve(success); },
        [resolve{aResolve}]() { resolve(false); });
  } else {
    aResolve(false);
  }

  // The recorder is torn down regardless of whether the write succeeds.
  mHaveCompositionRecorder = false;

  return IPC_OK();
}
mozilla::ipc::IPCResult CompositorBridgeParent::RecvEndRecordingToMemory(
EndRecordingToMemoryResolver&& aResolve) {
mozilla::ipc::IPCResult CompositorBridgeParent::RecvEndRecording(
EndRecordingResolver&& aResolve) {
if (!mHaveCompositionRecorder) {
aResolve(Nothing());
return IPC_OK();
}
if (mWrBridge) {
RefPtr<CompositorBridgeParent> self = this;
mWrBridge->GetCollectedFrames()->Then(
mWrBridge->EndRecording()->Then(
NS_GetCurrentThread(), __func__,
[self, resolve{aResolve}](CollectedFrames&& frames) {
resolve(self->WrapCollectedFrames(std::move(frames)));
[resolve{aResolve}](FrameRecording&& recording) {
resolve(Some(std::move(recording)));
},
[resolve{aResolve}]() { resolve(Nothing()); });
} else {
aResolve(Nothing());
}
mHaveCompositionRecorder = false;
@ -1914,37 +1894,6 @@ mozilla::ipc::IPCResult CompositorBridgeParent::RecvEndRecordingToMemory(
return IPC_OK();
}
// Pack the collected frames into a single IPC payload: the frames' data
// URIs are concatenated into one Shmem buffer, with per-frame (timeOffset,
// length) records describing how to slice it back apart on the other side.
// Returns Nothing() if the shared-memory allocation fails.
Maybe<CollectedFramesParams> CompositorBridgeParent::WrapCollectedFrames(
    CollectedFrames&& aFrames) {
  CollectedFramesParams ipcFrames;
  ipcFrames.recordingStart() = aFrames.mRecordingStart;

  // First pass: total size of all data URIs, to size the Shmem once.
  size_t totalLength = 0;
  for (const CollectedFrame& frame : aFrames.mFrames) {
    totalLength += frame.mDataUri.Length();
  }

  Shmem shmem;
  if (!AllocShmem(totalLength, &shmem)) {
    return Nothing();
  }

  {
    // Second pass: copy each URI into the buffer back-to-back and record
    // its offset metadata so the receiver can reconstruct frame boundaries.
    char* raw = shmem.get<char>();
    for (CollectedFrame& frame : aFrames.mFrames) {
      size_t length = frame.mDataUri.Length();

      PodCopy(raw, frame.mDataUri.get(), length);
      raw += length;

      ipcFrames.frames().EmplaceBack(frame.mTimeOffset, length);
    }
  }

  ipcFrames.buffer() = std::move(shmem);
  return Some(std::move(ipcFrames));
}
void RecordCompositionPayloadsPresented(
const TimeStamp& aCompositionEndTime,
const nsTArray<CompositionPayload>& aPayloads) {

Просмотреть файл

@ -191,10 +191,8 @@ class CompositorBridgeParentBase : public PCompositorBridgeParent,
const wr::RenderReasons& aReasons) = 0;
virtual mozilla::ipc::IPCResult RecvBeginRecording(
const TimeStamp& aRecordingStart, BeginRecordingResolver&& aResolve) = 0;
virtual mozilla::ipc::IPCResult RecvEndRecordingToDisk(
EndRecordingToDiskResolver&& aResolve) = 0;
virtual mozilla::ipc::IPCResult RecvEndRecordingToMemory(
EndRecordingToMemoryResolver&& aResolve) = 0;
virtual mozilla::ipc::IPCResult RecvEndRecording(
EndRecordingResolver&& aResolve) = 0;
virtual mozilla::ipc::IPCResult RecvInitialize(
const LayersId& rootLayerTreeId) = 0;
virtual mozilla::ipc::IPCResult RecvWillClose() = 0;
@ -299,10 +297,8 @@ class CompositorBridgeParent final : public CompositorBridgeParentBase,
mozilla::ipc::IPCResult RecvBeginRecording(
const TimeStamp& aRecordingStart,
BeginRecordingResolver&& aResolve) override;
mozilla::ipc::IPCResult RecvEndRecordingToDisk(
EndRecordingToDiskResolver&& aResolve) override;
mozilla::ipc::IPCResult RecvEndRecordingToMemory(
EndRecordingToMemoryResolver&& aResolve) override;
mozilla::ipc::IPCResult RecvEndRecording(
EndRecordingResolver&& aResolve) override;
void NotifyMemoryPressure() override;
void AccumulateMemoryReport(wr::MemoryReport*) override;
@ -557,11 +553,6 @@ class CompositorBridgeParent final : public CompositorBridgeParentBase,
*/
static void UpdateWebRenderProfilerUI();
/**
* Wrap the data structure to be sent over IPC.
*/
Maybe<CollectedFramesParams> WrapCollectedFrames(CollectedFrames&& aFrames);
static void ResetStable();
void MaybeDeclareStable();

Просмотреть файл

@ -91,14 +91,8 @@ class ContentCompositorBridgeParent final : public CompositorBridgeParentBase {
return IPC_OK();
}
  // Content-process compositor bridge does not support composition
  // recording; always resolve the IPC promise with failure.
  mozilla::ipc::IPCResult RecvEndRecordingToDisk(
      EndRecordingToDiskResolver&& aResolve) override {
    aResolve(false);
    return IPC_OK();
  }
mozilla::ipc::IPCResult RecvEndRecordingToMemory(
EndRecordingToMemoryResolver&& aResolve) override {
mozilla::ipc::IPCResult RecvEndRecording(
EndRecordingResolver&& aResolve) override {
aResolve(Nothing());
return IPC_OK();
}

Просмотреть файл

@ -203,11 +203,8 @@ parent:
async BeginRecording(TimeStamp aRecordingStart)
returns (bool success);
async EndRecordingToDisk()
returns (bool success);
async EndRecordingToMemory()
returns (CollectedFramesParams? frames);
async EndRecording()
returns (FrameRecording? recording);
// To set up sharing the composited output to Firefox Reality on Desktop
async RequestFxrOutput();

Просмотреть файл

@ -2,18 +2,21 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
using mozilla::TimeStamp from "mozilla/TimeStamp.h";
[MoveOnly] using class mozilla::ipc::BigBuffer from "mozilla/ipc/BigBuffer.h";
namespace mozilla {
namespace layers {
struct CollectedFrameParams {
double timeOffset;
struct RecordedFrameData {
TimeStamp timeOffset;
uint32_t length;
};
struct CollectedFramesParams {
double recordingStart;
CollectedFrameParams[] frames;
Shmem buffer;
struct FrameRecording {
TimeStamp startTime;
RecordedFrameData[] frames;
BigBuffer bytes;
};
} // namespace layers

Просмотреть файл

@ -964,14 +964,9 @@ void WebRenderBridgeParent::BeginRecording(const TimeStamp& aRecordingStart) {
mApi->BeginRecording(aRecordingStart, mPipelineId);
}
RefPtr<wr::WebRenderAPI::WriteCollectedFramesPromise>
WebRenderBridgeParent::WriteCollectedFrames() {
return mApi->WriteCollectedFrames();
}
RefPtr<wr::WebRenderAPI::GetCollectedFramesPromise>
WebRenderBridgeParent::GetCollectedFrames() {
return mApi->GetCollectedFrames();
// Stop the composition recording by delegating to the WebRender API;
// the returned promise yields the recorded frames (see WebRenderAPI).
RefPtr<wr::WebRenderAPI::EndRecordingPromise>
WebRenderBridgeParent::EndRecording() {
  return mApi->EndRecording();
}
void WebRenderBridgeParent::AddPendingScrollPayload(

Просмотреть файл

@ -290,13 +290,6 @@ class WebRenderBridgeParent final : public PWebRenderBridgeParent,
void BeginRecording(const TimeStamp& aRecordingStart);
/**
* Write the frames collected since the call to BeginRecording to disk.
*
* If there is not currently a recorder, this is a no-op.
*/
RefPtr<wr::WebRenderAPI::WriteCollectedFramesPromise> WriteCollectedFrames();
#if defined(MOZ_WIDGET_ANDROID)
/**
* Request a screengrab for android
@ -305,13 +298,9 @@ class WebRenderBridgeParent final : public PWebRenderBridgeParent,
void MaybeCaptureScreenPixels();
#endif
/**
* Return the frames collected since the call to BeginRecording encoded
* as data URIs.
*
* If there is not currently a recorder, this is a no-op and the promise will
* be rejected.
* Stop recording and the frames collected since the call to BeginRecording
*/
RefPtr<wr::WebRenderAPI::GetCollectedFramesPromise> GetCollectedFrames();
RefPtr<wr::WebRenderAPI::EndRecordingPromise> EndRecording();
void DisableNativeCompositor();
void AddPendingScrollPayload(CompositionPayload& aPayload,

Просмотреть файл

@ -378,21 +378,13 @@ void RenderThread::BeginRecordingForWindow(wr::WindowId aWindowId,
renderer->BeginRecording(aRecordingStart, aRootPipelineId);
}
// Write the collected frames for the given window to disk. Must be called
// on the render thread; asserts that a renderer exists for the window.
void RenderThread::WriteCollectedFramesForWindow(wr::WindowId aWindowId) {
  MOZ_ASSERT(IsInRenderThread());

  RendererOGL* renderer = GetRenderer(aWindowId);
  MOZ_ASSERT(renderer);
  renderer->WriteCollectedFrames();
}
Maybe<layers::CollectedFrames> RenderThread::GetCollectedFramesForWindow(
Maybe<layers::FrameRecording> RenderThread::EndRecordingForWindow(
wr::WindowId aWindowId) {
MOZ_ASSERT(IsInRenderThread());
RendererOGL* renderer = GetRenderer(aWindowId);
MOZ_ASSERT(renderer);
return renderer->GetCollectedFrames();
return renderer->EndRecording();
}
void RenderThread::HandleFrameOneDoc(wr::WindowId aWindowId, bool aRender,

Просмотреть файл

@ -291,10 +291,7 @@ class RenderThread final {
const TimeStamp& aRecordingStart,
wr::PipelineId aRootPipelineId);
void WriteCollectedFramesForWindow(wr::WindowId aWindowId);
Maybe<layers::CollectedFrames> GetCollectedFramesForWindow(
wr::WindowId aWindowId);
Maybe<layers::FrameRecording> EndRecordingForWindow(wr::WindowId aWindowId);
static void MaybeEnableGLDebugMessage(gl::GLContext* aGLContext);

Просмотреть файл

@ -375,37 +375,22 @@ bool RendererOGL::DidPaintContent(const WebRenderPipelineInfo* aFrameEpochs) {
return didPaintContent;
}
// Flush the active composition recorder's frames to disk, then tear the
// recorder down: release the Rust-side recorder structures, tell the
// compositor frame recording is no longer needed, and drop the recorder.
// Diagnostic-asserts (and returns) if no recording was in progress.
void RendererOGL::WriteCollectedFrames() {
  if (!mCompositionRecorder) {
    MOZ_DIAGNOSTIC_ASSERT(
        false,
        "Attempted to write frames from a window that was not recording.");
    return;
  }

  mCompositionRecorder->WriteCollectedFrames();

  wr_renderer_release_composition_recorder_structures(mRenderer);

  mCompositor->MaybeRequestAllowFrameRecording(false);

  mCompositionRecorder = nullptr;
}
Maybe<layers::CollectedFrames> RendererOGL::GetCollectedFrames() {
Maybe<layers::FrameRecording> RendererOGL::EndRecording() {
if (!mCompositionRecorder) {
MOZ_DIAGNOSTIC_ASSERT(
false, "Attempted to get frames from a window that was not recording.");
return Nothing();
}
layers::CollectedFrames frames = mCompositionRecorder->GetCollectedFrames();
auto maybeRecording = mCompositionRecorder->GetRecording();
wr_renderer_release_composition_recorder_structures(mRenderer);
mCompositor->MaybeRequestAllowFrameRecording(false);
mCompositionRecorder = nullptr;
return Some(std::move(frames));
return maybeRecording;
}
void RendererOGL::FlushPipelineInfo() {

Просмотреть файл

@ -86,8 +86,8 @@ class RendererOGL {
void BeginRecording(const TimeStamp& aRecordingStart,
wr::PipelineId aPipelineId);
void MaybeRecordFrame(const WebRenderPipelineInfo* aPipelineInfo);
void WriteCollectedFrames();
Maybe<layers::CollectedFrames> GetCollectedFrames();
Maybe<layers::FrameRecording> EndRecording();
/// This can be called on the render thread only.
~RendererOGL();

Просмотреть файл

@ -700,66 +700,33 @@ void WebRenderAPI::BeginRecording(const TimeStamp& aRecordingStart,
RunOnRenderThread(std::move(event));
}
RefPtr<WebRenderAPI::WriteCollectedFramesPromise>
WebRenderAPI::WriteCollectedFrames() {
class WriteCollectedFramesEvent final : public RendererEvent {
RefPtr<WebRenderAPI::EndRecordingPromise> WebRenderAPI::EndRecording() {
class EndRecordingEvent final : public RendererEvent {
public:
explicit WriteCollectedFramesEvent() {
MOZ_COUNT_CTOR(WriteCollectedFramesEvent);
}
explicit EndRecordingEvent() { MOZ_COUNT_CTOR(EndRecordingEvent); }
MOZ_COUNTED_DTOR(WriteCollectedFramesEvent)
MOZ_COUNTED_DTOR(EndRecordingEvent);
void Run(RenderThread& aRenderThread, WindowId aWindowId) override {
aRenderThread.WriteCollectedFramesForWindow(aWindowId);
mPromise.Resolve(true, __func__);
}
Maybe<layers::FrameRecording> recording =
aRenderThread.EndRecordingForWindow(aWindowId);
RefPtr<WebRenderAPI::WriteCollectedFramesPromise> GetPromise() {
return mPromise.Ensure(__func__);
}
private:
MozPromiseHolder<WebRenderAPI::WriteCollectedFramesPromise> mPromise;
};
auto event = MakeUnique<WriteCollectedFramesEvent>();
auto promise = event->GetPromise();
RunOnRenderThread(std::move(event));
return promise;
}
RefPtr<WebRenderAPI::GetCollectedFramesPromise>
WebRenderAPI::GetCollectedFrames() {
class GetCollectedFramesEvent final : public RendererEvent {
public:
explicit GetCollectedFramesEvent() {
MOZ_COUNT_CTOR(GetCollectedFramesEvent);
}
MOZ_COUNTED_DTOR(GetCollectedFramesEvent);
void Run(RenderThread& aRenderThread, WindowId aWindowId) override {
Maybe<layers::CollectedFrames> frames =
aRenderThread.GetCollectedFramesForWindow(aWindowId);
if (frames) {
mPromise.Resolve(std::move(*frames), __func__);
if (recording) {
mPromise.Resolve(recording.extract(), __func__);
} else {
mPromise.Reject(NS_ERROR_UNEXPECTED, __func__);
}
}
RefPtr<WebRenderAPI::GetCollectedFramesPromise> GetPromise() {
RefPtr<WebRenderAPI::EndRecordingPromise> GetPromise() {
return mPromise.Ensure(__func__);
}
private:
MozPromiseHolder<WebRenderAPI::GetCollectedFramesPromise> mPromise;
MozPromiseHolder<WebRenderAPI::EndRecordingPromise> mPromise;
};
auto event = MakeUnique<GetCollectedFramesEvent>();
auto event = MakeUnique<EndRecordingEvent>();
auto promise = event->GetPromise();
RunOnRenderThread(std::move(event));

Просмотреть файл

@ -297,25 +297,10 @@ class WebRenderAPI final {
void BeginRecording(const TimeStamp& aRecordingStart,
wr::PipelineId aRootPipelineId);
typedef MozPromise<bool, nsresult, true> WriteCollectedFramesPromise;
typedef MozPromise<layers::CollectedFrames, nsresult, true>
GetCollectedFramesPromise;
typedef MozPromise<layers::FrameRecording, nsresult, true>
EndRecordingPromise;
/**
* Write the frames collected since the call to BeginRecording() to disk.
*
* If there is not currently a recorder, this is a no-op.
*/
RefPtr<WriteCollectedFramesPromise> WriteCollectedFrames();
/**
* Return the frames collected since the call to BeginRecording() encoded
* as data URIs.
*
* If there is not currently a recorder, this is a no-op and the promise will
* be rejected.
*/
RefPtr<GetCollectedFramesPromise> GetCollectedFrames();
RefPtr<EndRecordingPromise> EndRecording();
protected:
WebRenderAPI(wr::DocumentHandle* aHandle, wr::WindowId aId,