Bug 1482613 - Make WebVR work with multiprocess enabled on Android r=kip,rbarker,kvark

MozReview-Commit-ID: G9aHbp0G7DK

Differential Revision: https://phabricator.services.mozilla.com/D3152

--HG--
extra : moz-landing-system : lando
Imanol Fernandez 2018-08-24 15:32:12 +00:00
Parent 9b4034d1f9
Commit 2072f84821
10 changed files: 109 additions and 53 deletions

View file

@@ -83,7 +83,7 @@
#endif
#if defined(MOZ_WIDGET_ANDROID)
#include "../../gfx/vr/gfxVRExternal.h"
#include "mozilla/layers/ImageBridgeChild.h"
#endif
// Generated
@@ -236,6 +236,9 @@ WebGLContext::DestroyResourcesAndContext()
mDefaultVertexArray = nullptr;
mBoundTransformFeedback = nullptr;
mDefaultTransformFeedback = nullptr;
#if defined(MOZ_WIDGET_ANDROID)
mVRScreen = nullptr;
#endif
mQuerySlot_SamplesPassed = nullptr;
mQuerySlot_TFPrimsWritten = nullptr;
@@ -788,7 +791,7 @@ WebGLContext::SetDimensions(int32_t signedWidth, int32_t signedHeight)
return NS_OK;
// If we've already drawn, we should commit the current buffer.
PresentScreenBuffer();
PresentScreenBuffer(gl->Screen());
if (IsContextLost()) {
GenerateWarning("WebGL context was lost due to swap failure.");
@@ -1463,7 +1466,7 @@ WebGLContext::BlitBackbufferToCurDriverFB() const
// For an overview of how WebGL compositing works, see:
// https://wiki.mozilla.org/Platform/GFX/WebGL/Compositing
bool
WebGLContext::PresentScreenBuffer()
WebGLContext::PresentScreenBuffer(GLScreenBuffer* const targetScreen)
{
const FuncScope funcScope(*this, "<PresentScreenBuffer>");
if (IsContextLost())
@@ -1475,8 +1478,8 @@ WebGLContext::PresentScreenBuffer()
if (!ValidateAndInitFB(nullptr))
return false;
const auto& screen = gl->Screen();
if (screen->Size() != mDefaultFB->mSize &&
const auto& screen = targetScreen ? targetScreen : gl->Screen();
if ((!screen->IsReadBufferReady() || screen->Size() != mDefaultFB->mSize) &&
!screen->Resize(mDefaultFB->mSize))
{
GenerateWarning("screen->Resize failed. Losing context.");
@@ -1520,10 +1523,10 @@ WebGLContext::PresentScreenBuffer()
// Prepare the context for capture before compositing
void
WebGLContext::BeginComposition()
WebGLContext::BeginComposition(GLScreenBuffer* const screen)
{
// Present our screenbuffer, if needed.
PresentScreenBuffer();
PresentScreenBuffer(screen);
mDrawCallsSinceLastFlush = 0;
}
@@ -2301,47 +2304,40 @@ WebGLContext::GetUnpackSize(bool isFunc3D, uint32_t width, uint32_t height,
already_AddRefed<layers::SharedSurfaceTextureClient>
WebGLContext::GetVRFrame()
{
if (!gl)
return nullptr;
if (!gl)
return nullptr;
// Create a custom GLScreenBuffer for VR.
if (!mVRScreen) {
auto caps = gl->Screen()->mCaps;
mVRScreen = GLScreenBuffer::Create(gl, gfx::IntSize(1, 1), caps);
int frameId = gfx::impl::VRDisplayExternal::sPushIndex;
static int lastFrameId = -1;
/**
* Android doesn't like duplicated GetVRFrame calls within the same gfxVRExternal frame.
* Bail out of forced composition calls if we are still in the same VRExternal push frame index.
* Also discard frameId 0 because sometimes the compositor is not paused yet due to channel communication delays.
*/
const bool ignoreFrame = lastFrameId == frameId || frameId == 0;
lastFrameId = frameId;
if (!ignoreFrame) {
BeginComposition();
EndComposition();
}
RefPtr<ImageBridgeChild> imageBridge = ImageBridgeChild::GetSingleton();
if (imageBridge) {
TextureFlags flags = TextureFlags::ORIGIN_BOTTOM_LEFT;
UniquePtr<gl::SurfaceFactory> factory = gl::GLScreenBuffer::CreateFactory(gl, caps, imageBridge.get(), flags);
mVRScreen->Morph(std::move(factory));
}
}
if (!gl) {
return nullptr;
}
// Swap buffers as though composition has occurred.
// We will then share the resulting front buffer to be submitted to the VR compositor.
BeginComposition(mVRScreen.get());
EndComposition();
gl::GLScreenBuffer* screen = gl->Screen();
if (!screen) {
return nullptr;
}
if (IsContextLost())
return nullptr;
RefPtr<SharedSurfaceTextureClient> sharedSurface = screen->Front();
if (!sharedSurface || !sharedSurface->Surf()) {
return nullptr;
}
RefPtr<SharedSurfaceTextureClient> sharedSurface = mVRScreen->Front();
if (!sharedSurface || !sharedSurface->Surf())
return nullptr;
/**
* Make sure that the WebGL buffer is committed to the attached SurfaceTexture on Android.
*/
if (!ignoreFrame) {
// Make sure that the WebGL buffer is committed to the attached SurfaceTexture on Android.
sharedSurface->Surf()->ProducerAcquire();
sharedSurface->Surf()->Commit();
sharedSurface->Surf()->ProducerRelease();
}
return sharedSurface.forget();
return sharedSurface.forget();
}
#else
already_AddRefed<layers::SharedSurfaceTextureClient>

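The reason PresentScreenBuffer and BeginComposition gain an optional target screen above is so that GetVRFrame can present into the dedicated mVRScreen rather than the default screen buffer. Below is a minimal, self-contained sketch of that design choice, using invented names (ScreenBuffer, Context) rather than the real Gecko types:

#include <iostream>
#include <memory>
#include <string>

// Hypothetical model of the idea behind mVRScreen: keep a second, VR-only
// screen buffer and present into it, so handing a front buffer to the VR
// compositor never consumes the buffer used for normal page compositing.
struct ScreenBuffer {
    std::string name;
    int frontBuffer = 0;                  // which buffer is currently front
    void Present() { frontBuffer ^= 1; }  // swap front/back
};

class Context {
public:
    // Normal compositing presents into the default screen unless a specific
    // target is requested (mirroring the new optional parameter above).
    void PresentScreenBuffer(ScreenBuffer* aTarget = nullptr) {
        ScreenBuffer& screen = aTarget ? *aTarget : mDefaultScreen;
        screen.Present();
    }

    // VR path: lazily create the dedicated screen, present into it, and
    // hand out its front buffer without touching mDefaultScreen.
    ScreenBuffer* GetVRFrame() {
        if (!mVRScreen) {
            mVRScreen = std::make_unique<ScreenBuffer>(ScreenBuffer{"vr"});
        }
        PresentScreenBuffer(mVRScreen.get());
        return mVRScreen.get();
    }

private:
    ScreenBuffer mDefaultScreen{"default"};
    std::unique_ptr<ScreenBuffer> mVRScreen;
};

int main() {
    Context ctx;
    ScreenBuffer* vr = ctx.GetVRFrame();
    std::cout << vr->name << " front=" << vr->frontBuffer << '\n';  // vr front=1
    ctx.PresentScreenBuffer();  // normal compositing is unaffected by the VR path
}

The real GetVRFrame additionally morphs the VR screen with an ImageBridgeChild surface factory and commits the shared surface to the attached SurfaceTexture, as shown in the hunk above.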
View file

@@ -95,6 +95,7 @@ class VRLayerChild;
} // namespace gfx
namespace gl {
class GLScreenBuffer;
class MozFramebuffer;
} // namespace gl
@@ -498,10 +499,10 @@ public:
bool IsPreservingDrawingBuffer() const { return mOptions.preserveDrawingBuffer; }
bool PresentScreenBuffer();
bool PresentScreenBuffer(gl::GLScreenBuffer* const screen = nullptr);
// Prepare the context for capture before compositing
void BeginComposition();
void BeginComposition(gl::GLScreenBuffer* const screen = nullptr);
// Clean up the context after it has been captured for compositing
void EndComposition();
@@ -1980,6 +1981,9 @@ protected:
bool mNeedsIndexValidation = false;
const bool mAllowFBInvalidation;
#if defined(MOZ_WIDGET_ANDROID)
UniquePtr<gl::GLScreenBuffer> mVRScreen;
#endif
bool Has64BitTimestamps() const;

View file

@@ -222,6 +222,10 @@ public:
return mRead->Size();
}
bool IsReadBufferReady() const {
return mRead.get() != nullptr;
}
void BindAsFramebuffer(GLContext* const gl, GLenum target) const;
void RequireBlit();

View file

@@ -28,6 +28,9 @@
#if defined(MOZ_WIDGET_ANDROID)
#include "mozilla/layers/CompositorThread.h"
// Max frame duration (in milliseconds) on Android before the watchdog lets a new frame start.
// We can probably get rid of this once SubmitFrame is enforced to only be called from a VRDisplay loop.
#define ANDROID_MAX_FRAME_DURATION 4000
#endif // defined(MOZ_WIDGET_ANDROID)
@@ -79,6 +82,11 @@ VRDisplayHost::VRDisplayHost(VRDeviceType aType)
mDisplayInfo.mFrameId = 0;
mDisplayInfo.mDisplayState.mPresentingGeneration = 0;
mDisplayInfo.mDisplayState.mDisplayName[0] = '\0';
#if defined(MOZ_WIDGET_ANDROID)
mLastSubmittedFrameId = 0;
mLastStartedFrame = 0;
#endif // defined(MOZ_WIDGET_ANDROID)
}
VRDisplayHost::~VRDisplayHost()
@@ -200,10 +208,24 @@ VRDisplayHost::StartFrame()
{
AUTO_PROFILER_TRACING("VR", "GetSensorState");
#if defined(MOZ_WIDGET_ANDROID)
const bool isPresenting = mLastUpdateDisplayInfo.GetPresentingGroups() != 0;
double duration = mLastFrameStart.IsNull() ? 0.0 : (TimeStamp::Now() - mLastFrameStart).ToMilliseconds();
/**
* Do not start more VR frames until the last submitted frame has been processed.
*/
if (isPresenting && mLastStartedFrame > 0 && mDisplayInfo.mDisplayState.mLastSubmittedFrameId < mLastStartedFrame && duration < (double)ANDROID_MAX_FRAME_DURATION) {
return;
}
#endif // defined(MOZ_WIDGET_ANDROID)
mLastFrameStart = TimeStamp::Now();
++mDisplayInfo.mFrameId;
mDisplayInfo.mLastSensorState[mDisplayInfo.mFrameId % kVRMaxLatencyFrames] = GetSensorState();
mFrameStarted = true;
#if defined(MOZ_WIDGET_ANDROID)
mLastStartedFrame = mDisplayInfo.mFrameId;
#endif // defined(MOZ_WIDGET_ANDROID)
}
void
@@ -316,6 +338,19 @@ VRDisplayHost::SubmitFrame(VRLayerParent* aLayer,
return;
}
#if defined(MOZ_WIDGET_ANDROID)
/**
* Do not queue more submit frames until the last submitted frame has been processed
* and the new WebGL texture is ready.
*/
if (mLastSubmittedFrameId > 0 && mLastSubmittedFrameId != mDisplayInfo.mDisplayState.mLastSubmittedFrameId) {
mLastStartedFrame = 0;
return;
}
mLastSubmittedFrameId = aFrameId;
#endif // defined(MOZ_WIDGET_ANDROID)
mFrameStarted = false;
RefPtr<Runnable> submit =

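The two MOZ_WIDGET_ANDROID blocks above implement a simple pacing handshake: StartFrame will not begin a new frame while the VR process has not yet reported processing the last started one (unless the ANDROID_MAX_FRAME_DURATION watchdog expires), and SubmitFrame drops a submission that arrives before the previous one was consumed. A minimal, self-contained model of that handshake, assuming invented names such as FrameGate (plain C++, not the Mozilla classes):

#include <chrono>
#include <cstdint>
#include <iostream>

// Hypothetical model of the Android frame pacing in VRDisplayHost.
// "processedByCompositor" plays the role of
// mDisplayInfo.mDisplayState.mLastSubmittedFrameId reported by the VR process.
struct FrameGate {
    uint64_t frameId = 0;             // mDisplayInfo.mFrameId analogue
    uint64_t lastStartedFrame = 0;    // mLastStartedFrame analogue
    uint64_t lastSubmittedFrame = 0;  // mLastSubmittedFrameId analogue
    std::chrono::steady_clock::time_point lastFrameStart{};

    static constexpr double kMaxFrameDurationMs = 4000.0;  // watchdog

    bool CanStartFrame(bool presenting, uint64_t processedByCompositor) const {
        if (!presenting || lastStartedFrame == 0) {
            return true;
        }
        const double elapsedMs =
            std::chrono::duration<double, std::milli>(
                std::chrono::steady_clock::now() - lastFrameStart).count();
        // Block new frames while the compositor is still behind, but let the
        // watchdog push one through after kMaxFrameDurationMs.
        return processedByCompositor >= lastStartedFrame ||
               elapsedMs >= kMaxFrameDurationMs;
    }

    void StartFrame() {
        lastFrameStart = std::chrono::steady_clock::now();
        ++frameId;
        lastStartedFrame = frameId;
    }

    // Returns false when the submit must be dropped because the previous one
    // has not been consumed yet (and resets the start gate so the next
    // StartFrame is allowed again).
    bool SubmitFrame(uint64_t processedByCompositor) {
        if (lastSubmittedFrame > 0 &&
            lastSubmittedFrame != processedByCompositor) {
            lastStartedFrame = 0;
            return false;
        }
        lastSubmittedFrame = frameId;
        return true;
    }
};

int main() {
    FrameGate gate;
    uint64_t compositorAck = 0;  // what the VR process has processed so far

    gate.StartFrame();                                              // frame 1
    std::cout << gate.SubmitFrame(compositorAck) << '\n';           // 1: accepted
    std::cout << gate.CanStartFrame(true, compositorAck) << '\n';   // 0: wait
    compositorAck = 1;                                              // compositor catches up
    std::cout << gate.CanStartFrame(true, compositorAck) << '\n';   // 1: go
}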
View file

@@ -101,6 +101,11 @@ private:
VRDisplayInfo mLastUpdateDisplayInfo;
TimeStamp mLastFrameStart;
bool mFrameStarted;
#if defined(MOZ_WIDGET_ANDROID)
protected:
uint64_t mLastSubmittedFrameId;
uint64_t mLastStartedFrame;
#endif // defined(MOZ_WIDGET_ANDROID)
#if defined(XP_WIN)
protected:

View file

@@ -141,7 +141,7 @@ VRDisplayPresentation::~VRDisplayPresentation()
void VRDisplayPresentation::SubmitFrame()
{
for (VRLayerChild *layer : mLayers) {
layer->SubmitFrame(mDisplayClient->GetDisplayInfo().GetFrameId());
layer->SubmitFrame(mDisplayClient->GetDisplayInfo());
break; // Currently only one layer supported, submit only the first
}
}

View file

@@ -52,8 +52,6 @@ using namespace mozilla::gfx::impl;
using namespace mozilla::layers;
using namespace mozilla::dom;
int VRDisplayExternal::sPushIndex = 0;
VRDisplayExternal::VRDisplayExternal(const VRDisplayState& aDisplayState)
: VRDisplayHost(VRDeviceType::External)
, mBrowserState{}
@@ -108,7 +106,6 @@ VRDisplayExternal::StartPresentation()
if (mBrowserState.presentationActive) {
return;
}
sPushIndex = 0;
mTelemetry.Clear();
mTelemetry.mPresentationStart = TimeStamp::Now();
@@ -118,6 +115,8 @@ VRDisplayExternal::StartPresentation()
PushState();
#if defined(MOZ_WIDGET_ANDROID)
mLastSubmittedFrameId = 0;
mLastStartedFrame = 0;
/**
* The Android compositor is paused when presentation starts, which prevents VRManager::NotifyVsync() from being called.
* We post a VRTask to call VRManager::NotifyVsync() while the compositor is paused on Android.
@@ -162,7 +161,6 @@ VRDisplayExternal::StopPresentation()
if (!mBrowserState.presentationActive) {
return;
}
sPushIndex = 0;
// Indicate that we have stopped immersive mode
mBrowserState.presentationActive = false;
@@ -276,7 +274,6 @@ VRDisplayExternal::SubmitFrame(const layers::SurfaceDescriptor& aTexture,
layer.mRightEyeRect.height = aRightEyeRect.height;
PushState(true);
sPushIndex++;
#if defined(MOZ_WIDGET_ANDROID)
PullState([&]() {

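The comment added to StartPresentation above explains that the Android compositor is paused while presenting, so nothing drives VRManager::NotifyVsync() and a repeating VRTask has to stand in for real vsync. As a loose, self-contained illustration of that idea only (an invented FakeVsyncDriver class, a plain std::thread instead of the VR task queue):

#include <atomic>
#include <chrono>
#include <functional>
#include <iostream>
#include <thread>

// Hypothetical stand-in for the mechanism described above: while the platform
// compositor is paused (and therefore no real vsync notifications arrive),
// a repeating task keeps driving the "NotifyVsync" callback so the
// presentation loop does not stall. Names and timing are illustrative only.
class FakeVsyncDriver {
public:
    explicit FakeVsyncDriver(std::function<void()> aNotifyVsync)
        : mNotifyVsync(std::move(aNotifyVsync)) {}

    void Start() {
        mRunning = true;
        mThread = std::thread([this] {
            // Re-run roughly once per frame until stopped, mirroring a task
            // that keeps re-posting itself on the VR thread.
            while (mRunning) {
                mNotifyVsync();
                std::this_thread::sleep_for(std::chrono::milliseconds(16));
            }
        });
    }

    void Stop() {
        mRunning = false;
        if (mThread.joinable()) {
            mThread.join();
        }
    }

private:
    std::function<void()> mNotifyVsync;
    std::atomic<bool> mRunning{false};
    std::thread mThread;
};

int main() {
    int ticks = 0;
    FakeVsyncDriver driver([&ticks] { ++ticks; });
    driver.Start();                                    // compositor "paused"
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    driver.Stop();                                     // compositor resumed
    std::cout << "synthesized vsync ticks: " << ticks << '\n';
}

The actual patch re-posts a task on the VR thread rather than spawning a thread; the point is simply that something must keep ticking the vsync callback while the platform compositor is paused.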
View file

@@ -31,7 +31,6 @@ class VRDisplayExternal : public VRDisplayHost
{
public:
void ZeroSensor() override;
static int sPushIndex;
protected:
VRHMDSensorState GetSensorState() override;

View file

@@ -50,22 +50,38 @@ VRLayerChild::Initialize(dom::HTMLCanvasElement* aCanvasElement,
}
void
VRLayerChild::SubmitFrame(uint64_t aFrameId)
VRLayerChild::SubmitFrame(const VRDisplayInfo& aDisplayInfo)
{
uint64_t frameId = aDisplayInfo.GetFrameId();
// aFrameId will not increment unless the previously submitted
// frame was received by the VR thread and submitted to the VR
// compositor. We early-exit here in the event that SubmitFrame
// was called twice for the same aFrameId.
if (!mCanvasElement || aFrameId == mLastSubmittedFrameId) {
if (!mCanvasElement || frameId == mLastSubmittedFrameId) {
return;
}
mLastSubmittedFrameId = aFrameId;
// Keep the SharedSurfaceTextureClient alive long enough for
// 1 extra frame, accomodating overlapped asynchronous rendering.
mLastFrameTexture = mThisFrameTexture;
#if defined(MOZ_WIDGET_ANDROID)
/**
* Do not blit WebGL to a SurfaceTexture until the last submitted frame has been processed
* and the new frame poses are ready. SurfaceTextures need to be released on the VR render thread
* so that they can be used again by the WebGLContext GLScreenBuffer producer.
* Not doing so causes freezes, crashes, or other undefined behaviour.
*/
if (!mThisFrameTexture || aDisplayInfo.mDisplayState.mLastSubmittedFrameId == mLastSubmittedFrameId) {
mThisFrameTexture = mCanvasElement->GetVRFrame();
}
#else
mThisFrameTexture = mCanvasElement->GetVRFrame();
#endif // defined(MOZ_WIDGET_ANDROID)
mLastSubmittedFrameId = frameId;
if (!mThisFrameTexture) {
return;
}
@@ -90,7 +106,7 @@ VRLayerChild::SubmitFrame(uint64_t aFrameId)
return;
}
SendSubmitFrame(desc, aFrameId, mLeftEyeRect, mRightEyeRect);
SendSubmitFrame(desc, frameId, mLeftEyeRect, mRightEyeRect);
}
bool

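The MOZ_WIDGET_ANDROID branch above only asks the canvas for a new VR frame when the VR render thread has confirmed processing the previously submitted one; until then the texture already held is re-submitted. A small, self-contained model of that gating, with invented names (LayerProducer, Texture) standing in for the Gecko types:

#include <cstdint>
#include <iostream>
#include <memory>

// Hypothetical model of the Android-only texture gating in
// VRLayerChild::SubmitFrame: the producer keeps re-submitting the texture it
// already holds until the consumer (the VR render thread) reports that it has
// processed the previously submitted frame; only then is a fresh texture
// pulled from the canvas.
struct Texture {
    uint64_t frameId;
};

class LayerProducer {
public:
    // aProcessedFrameId plays the role of
    // aDisplayInfo.mDisplayState.mLastSubmittedFrameId.
    std::shared_ptr<Texture> SubmitFrame(uint64_t aFrameId,
                                         uint64_t aProcessedFrameId) {
        if (aFrameId == mLastSubmittedFrameId) {
            return nullptr;  // already submitted this frame id
        }
        if (!mThisFrameTexture || aProcessedFrameId == mLastSubmittedFrameId) {
            // The previous texture was consumed (or we have none yet):
            // safe to grab a new frame from the canvas.
            mThisFrameTexture = std::make_shared<Texture>(Texture{aFrameId});
        }
        mLastSubmittedFrameId = aFrameId;
        return mThisFrameTexture;
    }

private:
    std::shared_ptr<Texture> mThisFrameTexture;
    uint64_t mLastSubmittedFrameId = 0;
};

int main() {
    LayerProducer layer;
    uint64_t processed = 0;  // what the VR thread has consumed so far

    auto t1 = layer.SubmitFrame(1, processed);  // new texture for frame 1
    auto t2 = layer.SubmitFrame(2, processed);  // VR thread busy: reuse t1
    processed = 2;                              // frame 2 consumed
    auto t3 = layer.SubmitFrame(3, processed);  // now a fresh texture

    std::cout << (t1 == t2) << ' ' << (t2 != t3) << '\n';  // prints: 1 1
}

Holding on to the texture until the consumer releases it is what avoids the freezes and crashes mentioned in the comment, since the SurfaceTexture cannot safely return to the producer before the VR render thread lets go of it.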
View file

@@ -37,7 +37,7 @@ public:
void Initialize(dom::HTMLCanvasElement* aCanvasElement,
const gfx::Rect& aLeftEyeRect, const gfx::Rect& aRightEyeRect);
void SubmitFrame(uint64_t aFrameId);
void SubmitFrame(const VRDisplayInfo& aDisplayInfo);
bool IsIPCOpen();
private: