diff --git a/dom/canvas/WebGLContext.cpp b/dom/canvas/WebGLContext.cpp index 0fc1ef0a984b..ae134e2a93ca 100644 --- a/dom/canvas/WebGLContext.cpp +++ b/dom/canvas/WebGLContext.cpp @@ -83,7 +83,7 @@ #endif #if defined(MOZ_WIDGET_ANDROID) - #include "../../gfx/vr/gfxVRExternal.h" +#include "mozilla/layers/ImageBridgeChild.h" #endif // Generated @@ -236,6 +236,9 @@ WebGLContext::DestroyResourcesAndContext() mDefaultVertexArray = nullptr; mBoundTransformFeedback = nullptr; mDefaultTransformFeedback = nullptr; +#if defined(MOZ_WIDGET_ANDROID) + mVRScreen = nullptr; +#endif mQuerySlot_SamplesPassed = nullptr; mQuerySlot_TFPrimsWritten = nullptr; @@ -788,7 +791,7 @@ WebGLContext::SetDimensions(int32_t signedWidth, int32_t signedHeight) return NS_OK; // If we've already drawn, we should commit the current buffer. - PresentScreenBuffer(); + PresentScreenBuffer(gl->Screen()); if (IsContextLost()) { GenerateWarning("WebGL context was lost due to swap failure."); @@ -1463,7 +1466,7 @@ WebGLContext::BlitBackbufferToCurDriverFB() const // For an overview of how WebGL compositing works, see: // https://wiki.mozilla.org/Platform/GFX/WebGL/Compositing bool -WebGLContext::PresentScreenBuffer() +WebGLContext::PresentScreenBuffer(GLScreenBuffer* const targetScreen) { const FuncScope funcScope(*this, ""); if (IsContextLost()) @@ -1475,8 +1478,8 @@ WebGLContext::PresentScreenBuffer() if (!ValidateAndInitFB(nullptr)) return false; - const auto& screen = gl->Screen(); - if (screen->Size() != mDefaultFB->mSize && + const auto& screen = targetScreen ? targetScreen : gl->Screen(); + if ((!screen->IsReadBufferReady() || screen->Size() != mDefaultFB->mSize) && !screen->Resize(mDefaultFB->mSize)) { GenerateWarning("screen->Resize failed. 
Losing context."); @@ -1520,10 +1523,10 @@ WebGLContext::PresentScreenBuffer() // Prepare the context for capture before compositing void -WebGLContext::BeginComposition() +WebGLContext::BeginComposition(GLScreenBuffer* const screen) { // Present our screenbuffer, if needed. - PresentScreenBuffer(); + PresentScreenBuffer(screen); mDrawCallsSinceLastFlush = 0; } @@ -2301,47 +2304,40 @@ WebGLContext::GetUnpackSize(bool isFunc3D, uint32_t width, uint32_t height, already_AddRefed<layers::SharedSurfaceTextureClient> WebGLContext::GetVRFrame() { - if (!gl) - return nullptr; + if (!gl) + return nullptr; + + // Create a custom GLScreenBuffer for VR. + if (!mVRScreen) { + auto caps = gl->Screen()->mCaps; + mVRScreen = GLScreenBuffer::Create(gl, gfx::IntSize(1, 1), caps); - int frameId = gfx::impl::VRDisplayExternal::sPushIndex; - static int lastFrameId = -1; - /** - * Android doesn't like duplicated GetVRFrame within the same gfxVRExternal frame. - * Ballout forced composition calls if we are in the same VRExternal push frame index. - * Also discard frameId 0 because sometimes compositor is not paused yet due to channel communication delays. - */ - const bool ignoreFrame = lastFrameId == frameId || frameId == 0; - lastFrameId = frameId; - if (!ignoreFrame) { - BeginComposition(); - EndComposition(); - } + RefPtr<ImageBridgeChild> imageBridge = ImageBridgeChild::GetSingleton(); + if (imageBridge) { + TextureFlags flags = TextureFlags::ORIGIN_BOTTOM_LEFT; + UniquePtr<gl::SurfaceFactory> factory = gl::GLScreenBuffer::CreateFactory(gl, caps, imageBridge.get(), flags); + mVRScreen->Morph(std::move(factory)); + } + } - if (!gl) { - return nullptr; - } + // Swap buffers as though composition has occurred. + // We will then share the resulting front buffer to be submitted to the VR compositor. 
+ BeginComposition(mVRScreen.get()); + EndComposition(); - gl::GLScreenBuffer* screen = gl->Screen(); - if (!screen) { - return nullptr; - } + if (IsContextLost()) + return nullptr; - RefPtr<SharedSurfaceTextureClient> sharedSurface = screen->Front(); - if (!sharedSurface || !sharedSurface->Surf()) { - return nullptr; - } + RefPtr<SharedSurfaceTextureClient> sharedSurface = mVRScreen->Front(); + if (!sharedSurface || !sharedSurface->Surf()) + return nullptr; - /** - * Make sure that the WebGL buffer is committed to the attached SurfaceTexture on Android. - */ - if (!ignoreFrame) { + // Make sure that the WebGL buffer is committed to the attached SurfaceTexture on Android. sharedSurface->Surf()->ProducerAcquire(); sharedSurface->Surf()->Commit(); sharedSurface->Surf()->ProducerRelease(); - } - return sharedSurface.forget(); + return sharedSurface.forget(); } #else already_AddRefed<layers::SharedSurfaceTextureClient> diff --git a/dom/canvas/WebGLContext.h b/dom/canvas/WebGLContext.h index 1817bb64c195..037ab5db3e5d 100644 --- a/dom/canvas/WebGLContext.h +++ b/dom/canvas/WebGLContext.h @@ -95,6 +95,7 @@ class VRLayerChild; } // namespace gfx namespace gl { +class GLScreenBuffer; class MozFramebuffer; } // namespace gl @@ -498,10 +499,10 @@ public: bool IsPreservingDrawingBuffer() const { return mOptions.preserveDrawingBuffer; } - bool PresentScreenBuffer(); + bool PresentScreenBuffer(gl::GLScreenBuffer* const screen = nullptr); // Prepare the context for capture before compositing - void BeginComposition(); + void BeginComposition(gl::GLScreenBuffer* const screen = nullptr); // Clean up the context after captured for compositing void EndComposition(); @@ -1980,6 +1981,9 @@ protected: bool mNeedsIndexValidation = false; const bool mAllowFBInvalidation; +#if defined(MOZ_WIDGET_ANDROID) + UniquePtr<gl::GLScreenBuffer> mVRScreen; +#endif bool Has64BitTimestamps() const; diff --git a/gfx/gl/GLScreenBuffer.h b/gfx/gl/GLScreenBuffer.h index d7bab1574843..50961f5e68f1 100644 --- a/gfx/gl/GLScreenBuffer.h +++ b/gfx/gl/GLScreenBuffer.h @@ -222,6 +222,10 @@ public: return mRead->Size(); } 
+ bool IsReadBufferReady() const { + return mRead.get() != nullptr; + } + void BindAsFramebuffer(GLContext* const gl, GLenum target) const; void RequireBlit(); diff --git a/gfx/vr/VRDisplayHost.cpp b/gfx/vr/VRDisplayHost.cpp index 580744a06bf6..8f0e0437308f 100644 --- a/gfx/vr/VRDisplayHost.cpp +++ b/gfx/vr/VRDisplayHost.cpp @@ -28,6 +28,9 @@ #if defined(MOZ_WIDGET_ANDROID) #include "mozilla/layers/CompositorThread.h" +// Max frame duration on Android before the watchdog submits a new one. +// Probably we can get rid of this when we enforce that SubmitFrame can only be called in a VRDisplay loop. +#define ANDROID_MAX_FRAME_DURATION 4000 #endif // defined(MOZ_WIDGET_ANDROID) @@ -79,6 +82,11 @@ VRDisplayHost::VRDisplayHost(VRDeviceType aType) mDisplayInfo.mFrameId = 0; mDisplayInfo.mDisplayState.mPresentingGeneration = 0; mDisplayInfo.mDisplayState.mDisplayName[0] = '\0'; + +#if defined(MOZ_WIDGET_ANDROID) + mLastSubmittedFrameId = 0; + mLastStartedFrame = 0; +#endif // defined(MOZ_WIDGET_ANDROID) } VRDisplayHost::~VRDisplayHost() @@ -200,10 +208,24 @@ VRDisplayHost::StartFrame() { AUTO_PROFILER_TRACING("VR", "GetSensorState"); +#if defined(MOZ_WIDGET_ANDROID) + const bool isPresenting = mLastUpdateDisplayInfo.GetPresentingGroups() != 0; + double duration = mLastFrameStart.IsNull() ? 0.0 : (TimeStamp::Now() - mLastFrameStart).ToMilliseconds(); + /** + * Do not start more VR frames until the last submitted frame is already processed. 
+ */ + if (isPresenting && mLastStartedFrame > 0 && mDisplayInfo.mDisplayState.mLastSubmittedFrameId < mLastStartedFrame && duration < (double)ANDROID_MAX_FRAME_DURATION) { + return; + } +#endif // defined(MOZ_WIDGET_ANDROID) + mLastFrameStart = TimeStamp::Now(); ++mDisplayInfo.mFrameId; mDisplayInfo.mLastSensorState[mDisplayInfo.mFrameId % kVRMaxLatencyFrames] = GetSensorState(); mFrameStarted = true; +#if defined(MOZ_WIDGET_ANDROID) + mLastStartedFrame = mDisplayInfo.mFrameId; +#endif // defined(MOZ_WIDGET_ANDROID) } void @@ -316,6 +338,19 @@ VRDisplayHost::SubmitFrame(VRLayerParent* aLayer, return; } +#if defined(MOZ_WIDGET_ANDROID) + /** + * Do not queue more submit frames until the last submitted frame is already processed + * and the new WebGL texture is ready. + */ + if (mLastSubmittedFrameId > 0 && mLastSubmittedFrameId != mDisplayInfo.mDisplayState.mLastSubmittedFrameId) { + mLastStartedFrame = 0; + return; + } + + mLastSubmittedFrameId = aFrameId; +#endif // defined(MOZ_WIDGET_ANDROID) + mFrameStarted = false; RefPtr<Runnable> submit = diff --git a/gfx/vr/VRDisplayHost.h b/gfx/vr/VRDisplayHost.h index 0f05d7e79923..223d44f407c8 100644 --- a/gfx/vr/VRDisplayHost.h +++ b/gfx/vr/VRDisplayHost.h @@ -101,6 +101,11 @@ private: VRDisplayInfo mLastUpdateDisplayInfo; TimeStamp mLastFrameStart; bool mFrameStarted; +#if defined(MOZ_WIDGET_ANDROID) +protected: + uint64_t mLastSubmittedFrameId; + uint64_t mLastStartedFrame; +#endif // defined(MOZ_WIDGET_ANDROID) #if defined(XP_WIN) protected: diff --git a/gfx/vr/VRDisplayPresentation.cpp b/gfx/vr/VRDisplayPresentation.cpp index 0f1709d968cf..bb6c1dee4e45 100644 --- a/gfx/vr/VRDisplayPresentation.cpp +++ b/gfx/vr/VRDisplayPresentation.cpp @@ -141,7 +141,7 @@ VRDisplayPresentation::~VRDisplayPresentation() void VRDisplayPresentation::SubmitFrame() { for (VRLayerChild *layer : mLayers) { - layer->SubmitFrame(mDisplayClient->GetDisplayInfo().GetFrameId()); + layer->SubmitFrame(mDisplayClient->GetDisplayInfo()); break; // 
Currently only one layer supported, submit only the first } } diff --git a/gfx/vr/gfxVRExternal.cpp b/gfx/vr/gfxVRExternal.cpp index b81de9ef754f..44f37d81ef02 100644 --- a/gfx/vr/gfxVRExternal.cpp +++ b/gfx/vr/gfxVRExternal.cpp @@ -52,8 +52,6 @@ using namespace mozilla::gfx::impl; using namespace mozilla::layers; using namespace mozilla::dom; -int VRDisplayExternal::sPushIndex = 0; - VRDisplayExternal::VRDisplayExternal(const VRDisplayState& aDisplayState) : VRDisplayHost(VRDeviceType::External) , mBrowserState{} @@ -108,7 +106,6 @@ VRDisplayExternal::StartPresentation() if (mBrowserState.presentationActive) { return; } - sPushIndex = 0; mTelemetry.Clear(); mTelemetry.mPresentationStart = TimeStamp::Now(); @@ -118,6 +115,8 @@ VRDisplayExternal::StartPresentation() PushState(); #if defined(MOZ_WIDGET_ANDROID) + mLastSubmittedFrameId = 0; + mLastStartedFrame = 0; /** * Android compositor is paused when presentation starts. That causes VRManager::NotifyVsync() not to be called. * We post a VRTask to call VRManager::NotifyVsync() while the compositor is paused on Android. 
@@ -162,7 +161,6 @@ VRDisplayExternal::StopPresentation() if (!mBrowserState.presentationActive) { return; } - sPushIndex = 0; // Indicate that we have stopped immersive mode mBrowserState.presentationActive = false; @@ -276,7 +274,6 @@ VRDisplayExternal::SubmitFrame(const layers::SurfaceDescriptor& aTexture, layer.mRightEyeRect.height = aRightEyeRect.height; PushState(true); - sPushIndex++; #if defined(MOZ_WIDGET_ANDROID) PullState([&]() { diff --git a/gfx/vr/gfxVRExternal.h b/gfx/vr/gfxVRExternal.h index 7e7105455bed..bd6d16dca2a8 100644 --- a/gfx/vr/gfxVRExternal.h +++ b/gfx/vr/gfxVRExternal.h @@ -31,7 +31,6 @@ class VRDisplayExternal : public VRDisplayHost { public: void ZeroSensor() override; - static int sPushIndex; protected: VRHMDSensorState GetSensorState() override; diff --git a/gfx/vr/ipc/VRLayerChild.cpp b/gfx/vr/ipc/VRLayerChild.cpp index 028fa3f9bf4b..25fbd3758785 100644 --- a/gfx/vr/ipc/VRLayerChild.cpp +++ b/gfx/vr/ipc/VRLayerChild.cpp @@ -50,22 +50,38 @@ VRLayerChild::Initialize(dom::HTMLCanvasElement* aCanvasElement, } void -VRLayerChild::SubmitFrame(uint64_t aFrameId) +VRLayerChild::SubmitFrame(const VRDisplayInfo& aDisplayInfo) { + uint64_t frameId = aDisplayInfo.GetFrameId(); + // aFrameId will not increment unless the previuosly submitted // frame was received by the VR thread and submitted to the VR // compositor. We early-exit here in the event that SubmitFrame // was called twice for the same aFrameId. - if (!mCanvasElement || aFrameId == mLastSubmittedFrameId) { + if (!mCanvasElement || frameId == mLastSubmittedFrameId) { return; } - mLastSubmittedFrameId = aFrameId; // Keep the SharedSurfaceTextureClient alive long enough for // 1 extra frame, accomodating overlapped asynchronous rendering. mLastFrameTexture = mThisFrameTexture; +#if defined(MOZ_WIDGET_ANDROID) + /** + * Do not blit WebGL to a SurfaceTexture until the last submitted frame is already processed + * and the new frame poses are ready. 
SurfaceTextures need to be released in the VR render thread + * in order to allow to be used again in the WebGLContext GLScreenBuffer producer. + * Not doing so causes some freezes, crashes or other undefined behaviour. + */ + if (!mThisFrameTexture || aDisplayInfo.mDisplayState.mLastSubmittedFrameId == mLastSubmittedFrameId) { + mThisFrameTexture = mCanvasElement->GetVRFrame(); + } +#else mThisFrameTexture = mCanvasElement->GetVRFrame(); +#endif // defined(MOZ_WIDGET_ANDROID) + + mLastSubmittedFrameId = frameId; + if (!mThisFrameTexture) { return; } @@ -90,7 +106,7 @@ VRLayerChild::SubmitFrame(uint64_t aFrameId) return; } - SendSubmitFrame(desc, aFrameId, mLeftEyeRect, mRightEyeRect); + SendSubmitFrame(desc, frameId, mLeftEyeRect, mRightEyeRect); } bool diff --git a/gfx/vr/ipc/VRLayerChild.h b/gfx/vr/ipc/VRLayerChild.h index d69b1f0f4ea5..89ca8336ed0d 100644 --- a/gfx/vr/ipc/VRLayerChild.h +++ b/gfx/vr/ipc/VRLayerChild.h @@ -37,7 +37,7 @@ public: void Initialize(dom::HTMLCanvasElement* aCanvasElement, const gfx::Rect& aLeftEyeRect, const gfx::Rect& aRightEyeRect); - void SubmitFrame(uint64_t aFrameId); + void SubmitFrame(const VRDisplayInfo& aDisplayInfo); bool IsIPCOpen(); private: