diff --git a/dom/media/platforms/apple/AppleVTDecoder.cpp b/dom/media/platforms/apple/AppleVTDecoder.cpp index d73928946a0e..a1dd2090b3ee 100644 --- a/dom/media/platforms/apple/AppleVTDecoder.cpp +++ b/dom/media/platforms/apple/AppleVTDecoder.cpp @@ -45,7 +45,10 @@ AppleVTDecoder::AppleVTDecoder(const VideoInfo& aConfig, ? *aConfig.mColorSpace : DefaultColorSpace({mPictureWidth, mPictureHeight})), mColorRange(aConfig.mColorRange), +#if defined(MAC_OS_VERSION_10_13) && \ + MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_VERSION_10_13 mColorDepth(aConfig.mColorDepth), +#endif mStreamType(MP4Decoder::IsH264(aConfig.mMimeType) ? StreamType::H264 : VPXDecoder::IsVP9(aConfig.mMimeType) ? StreamType::VP9 : StreamType::Unknown), @@ -427,28 +430,21 @@ void AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage, CVPixelBufferUnlockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly); } else { #ifndef MOZ_WIDGET_UIKIT - // Set pixel buffer properties on aImage before we extract its surface. - // This ensures that we can use defined enums to set values instead - // of later setting magic CFSTR values on the surface itself. - if (mColorSpace == gfx::YUVColorSpace::BT601) { - CVBufferSetAttachment(aImage, kCVImageBufferYCbCrMatrixKey, - kCVImageBufferYCbCrMatrix_ITU_R_601_4, - kCVAttachmentMode_ShouldPropagate); - } else if (mColorSpace == gfx::YUVColorSpace::BT2020) { - CVBufferSetAttachment(aImage, kCVImageBufferYCbCrMatrixKey, - kCVImageBufferYCbCrMatrix_ITU_R_2020, - kCVAttachmentMode_ShouldPropagate); - } else { - CVBufferSetAttachment(aImage, kCVImageBufferYCbCrMatrixKey, - kCVImageBufferYCbCrMatrix_ITU_R_709_2, - kCVAttachmentMode_ShouldPropagate); - } - CFTypeRefPtr surface = CFTypeRefPtr::WrapUnderGetRule( CVPixelBufferGetIOSurface(aImage)); MOZ_ASSERT(surface, "Decoder didn't return an IOSurface backed buffer"); + // Setup the correct YCbCr conversion matrix on the IOSurface, in case we + // pass this directly to CoreAnimation. 
+ if (mColorSpace == gfx::YUVColorSpace::BT601) { + IOSurfaceSetValue(surface.get(), CFSTR("IOSurfaceYCbCrMatrix"), + CFSTR("ITU_R_601_4")); + } else { + IOSurfaceSetValue(surface.get(), CFSTR("IOSurfaceYCbCrMatrix"), + CFSTR("ITU_R_709_2")); + } + RefPtr macSurface = new MacIOSurface(std::move(surface)); macSurface->SetYUVColorSpace(mColorSpace); @@ -618,14 +614,13 @@ CFDictionaryRef AppleVTDecoder::CreateOutputConfiguration() { #ifndef MOZ_WIDGET_UIKIT // Output format type: - # if !defined(MAC_OS_VERSION_10_13) || \ MAC_OS_X_VERSION_MAX_ALLOWED < MAC_OS_VERSION_10_13 - enum : OSType { - kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange = 'x420', - kCVPixelFormatType_420YpCbCr10BiPlanarFullRange = 'xf20', - }; -# endif + SInt32 PixelFormatTypeValue = + mColorRange == gfx::ColorRange::FULL + ? kCVPixelFormatType_420YpCbCr8BiPlanarFullRange + : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; +# else bool is10Bit = (gfx::BitDepthForColorDepth(mColorDepth) == 10); SInt32 PixelFormatTypeValue = mColorRange == gfx::ColorRange::FULL @@ -633,6 +628,7 @@ CFDictionaryRef AppleVTDecoder::CreateOutputConfiguration() { : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) : (is10Bit ? 
kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange : kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange); +# endif AutoCFRelease PixelFormatTypeNumber = CFNumberCreate( kCFAllocatorDefault, kCFNumberSInt32Type, &PixelFormatTypeValue); // Construct IOSurface Properties diff --git a/dom/media/platforms/apple/AppleVTDecoder.h b/dom/media/platforms/apple/AppleVTDecoder.h index 84d99badcbdd..e4b2a1fec30f 100644 --- a/dom/media/platforms/apple/AppleVTDecoder.h +++ b/dom/media/platforms/apple/AppleVTDecoder.h @@ -92,7 +92,10 @@ class AppleVTDecoder : public MediaDataDecoder, const uint32_t mDisplayHeight; const gfx::YUVColorSpace mColorSpace; const gfx::ColorRange mColorRange; +#if defined(MAC_OS_VERSION_10_13) && \ + MAC_OS_X_VERSION_MAX_ALLOWED >= MAC_OS_VERSION_10_13 const gfx::ColorDepth mColorDepth; +#endif // Method to set up the decompression session. MediaResult InitializeSession(); diff --git a/gfx/2d/MacIOSurface.cpp b/gfx/2d/MacIOSurface.cpp index dc19bf7d36dc..59ecee8a48b6 100644 --- a/gfx/2d/MacIOSurface.cpp +++ b/gfx/2d/MacIOSurface.cpp @@ -404,21 +404,10 @@ already_AddRefed MacIOSurface::GetAsDrawTargetLocked( } SurfaceFormat MacIOSurface::GetFormat() const { -#if !defined(MAC_OS_VERSION_10_13) || \ - MAC_OS_X_VERSION_MAX_ALLOWED < MAC_OS_VERSION_10_13 - enum : OSType { - kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange = 'x420', - kCVPixelFormatType_420YpCbCr10BiPlanarFullRange = 'xf20', - }; -#endif - switch (GetPixelFormat()) { case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: return SurfaceFormat::NV12; - case kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange: - case kCVPixelFormatType_420YpCbCr10BiPlanarFullRange: - return SurfaceFormat::P010; case kCVPixelFormatType_422YpCbCr8_yuvs: case kCVPixelFormatType_422YpCbCr8FullRange: return SurfaceFormat::YUV422; diff --git a/gfx/layers/NativeLayerCA.h b/gfx/layers/NativeLayerCA.h index 43f5e4c91e52..e34a67bbefc9 100644 --- a/gfx/layers/NativeLayerCA.h +++ 
b/gfx/layers/NativeLayerCA.h @@ -355,6 +355,8 @@ class NativeLayerCA : public NativeLayer { // changed. UpdateType HasUpdate(bool aIsVideo); + bool CanSpecializeSurface(IOSurfaceRef surface); + // Lazily initialized by first call to ApplyChanges. mWrappingLayer is the // layer that applies the intersection of mDisplayRect and mClipRect (if // set), and mContentCALayer is the layer that hosts the IOSurface. We do diff --git a/gfx/layers/NativeLayerCA.mm b/gfx/layers/NativeLayerCA.mm index 0b93dea9e4a1..e4814b961bbb 100644 --- a/gfx/layers/NativeLayerCA.mm +++ b/gfx/layers/NativeLayerCA.mm @@ -29,7 +29,6 @@ #include "mozilla/webrender/RenderMacIOSurfaceTextureHost.h" #include "nsCocoaFeatures.h" #include "ScopedGLHelpers.h" -#include "SDKDeclarations.h" @interface CALayer (PrivateSetContentsOpaque) - (void)setContentsOpaque:(BOOL)opaque; @@ -782,47 +781,9 @@ bool NativeLayerCA::IsVideoAndLocked(const MutexAutoLock& aProofOfLock) { } bool NativeLayerCA::ShouldSpecializeVideo(const MutexAutoLock& aProofOfLock) { - if (!IsVideoAndLocked(aProofOfLock)) { - // Only videos are eligible. - return false; - } - - if (!StaticPrefs::gfx_core_animation_specialize_video() || - !nsCocoaFeatures::OnHighSierraOrLater()) { - // Pref must be set and we must be on a modern-enough macOS. - return false; - } - - // Beyond this point, we need to know about the format of the video. - - MOZ_ASSERT(mTextureHost); - MacIOSurface* macIOSurface = mTextureHost->GetSurface(); - if (macIOSurface->GetYUVColorSpace() == gfx::YUVColorSpace::BT2020) { - // BT2020 is a signifier of HDR color space, whether or not the bit depth - // is expanded to cover that color space. This video needs a specialized - // video layer. 
- return true; - } - - CFTypeRefPtr surface = macIOSurface->GetIOSurfaceRef(); - OSType pixelFormat = IOSurfaceGetPixelFormat(surface.get()); - if (pixelFormat == kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange || - pixelFormat == kCVPixelFormatType_420YpCbCr10BiPlanarFullRange) { - // HDR videos require specialized video layers. - return true; - } - - // Beyond this point, we return true if-and-only-if we think we can achieve - // the power-saving "detached mode" of the macOS compositor. - - if (pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange && - pixelFormat != kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) { - // The video is not in one of the formats that qualifies for detachment. - return false; - } - - // It will only detach if we're fullscreen. - return mRootWindowIsFullscreen; + return StaticPrefs::gfx_core_animation_specialize_video() && + nsCocoaFeatures::OnHighSierraOrLater() && mRootWindowIsFullscreen && + IsVideoAndLocked(aProofOfLock); } void NativeLayerCA::SetRootWindowIsFullscreen(bool aFullscreen) { @@ -1283,10 +1244,6 @@ static NSString* NSStringForOSType(OSType type) { CGColorSpaceRef colorSpace = CVImageBufferGetColorSpace(aBuffer); NSLog(@"ColorSpace is %@.\n", colorSpace); - CFDictionaryRef bufferAttachments = - CVBufferGetAttachments(aBuffer, kCVAttachmentMode_ShouldPropagate); - NSLog(@"Buffer attachments are %@.\n", bufferAttachments); - OSType codec = CMFormatDescriptionGetMediaSubType(aFormat); NSLog(@"Codec is %@.\n", NSStringForOSType(codec)); @@ -1423,10 +1380,13 @@ bool NativeLayerCA::Representation::ApplyChanges( bool updateSucceeded = false; if (aSpecializeVideo) { IOSurfaceRef surface = aFrontSurface.get(); - updateSucceeded = EnqueueSurface(surface); + if (CanSpecializeSurface(surface)) { + IOSurfaceRef surface = aFrontSurface.get(); + updateSucceeded = EnqueueSurface(surface); - if (updateSucceeded) { - mMutatedFrontSurface = false; + if (updateSucceeded) { + mMutatedFrontSurface = false; + } } } @@ -1580,7 
+1540,7 @@ bool NativeLayerCA::Representation::ApplyChanges( if (mMutatedFrontSurface) { bool isEnqueued = false; IOSurfaceRef surface = aFrontSurface.get(); - if (aSpecializeVideo) { + if (aSpecializeVideo && CanSpecializeSurface(surface)) { // Attempt to enqueue this as a video frame. If we fail, we'll fall back to image case. isEnqueued = EnqueueSurface(surface); } @@ -1642,6 +1602,15 @@ bool NativeLayerCA::WillUpdateAffectLayers(WhichRepresentation aRepresentation) return GetRepresentation(aRepresentation).mMutatedSpecializeVideo; } +bool NativeLayerCA::Representation::CanSpecializeSurface(IOSurfaceRef surface) { + // Software decoded videos can't achieve detached mode. Until Bug 1731691 is fixed, + // there's no benefit to specializing these videos. For now, only allow 420v or 420f, + // which we get from hardware decode. + OSType pixelFormat = IOSurfaceGetPixelFormat(surface); + return (pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange || + pixelFormat == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange); +} + // Called when mMutex is already being held by the current thread. Maybe NativeLayerCA::GetUnusedSurfaceAndCleanUp( const MutexAutoLock& aProofOfLock) { diff --git a/gfx/layers/d3d11/TextureD3D11.cpp b/gfx/layers/d3d11/TextureD3D11.cpp index c2331e3035e2..df32c69e47bb 100644 --- a/gfx/layers/d3d11/TextureD3D11.cpp +++ b/gfx/layers/d3d11/TextureD3D11.cpp @@ -1034,11 +1034,6 @@ void DXGITextureHostD3D11::PushDisplayItems( case gfx::SurfaceFormat::P010: case gfx::SurfaceFormat::P016: case gfx::SurfaceFormat::NV12: { - // DXGI_FORMAT_P010 stores its 10 bit value in the most significant bits - // of each 16 bit word with the unused lower bits cleared to zero so that - // it may be handled as if it was DXGI_FORMAT_P016. This is approximately - // perceptually correct. However, due to rounding error, the precise - // quantized value after sampling may be off by 1. 
MOZ_ASSERT(aImageKeys.length() == 2); aBuilder.PushNV12Image( aBounds, aClip, true, aImageKeys[0], aImageKeys[1], diff --git a/gfx/layers/opengl/MacIOSurfaceTextureHostOGL.cpp b/gfx/layers/opengl/MacIOSurfaceTextureHostOGL.cpp index dede69c4dcc0..a2c492dcb16b 100644 --- a/gfx/layers/opengl/MacIOSurfaceTextureHostOGL.cpp +++ b/gfx/layers/opengl/MacIOSurfaceTextureHostOGL.cpp @@ -92,8 +92,7 @@ uint32_t MacIOSurfaceTextureHostOGL::NumSubTextures() { case gfx::SurfaceFormat::YUV422: { return 1; } - case gfx::SurfaceFormat::NV12: - case gfx::SurfaceFormat::P010: { + case gfx::SurfaceFormat::NV12: { return 2; } default: { @@ -154,21 +153,6 @@ void MacIOSurfaceTextureHostOGL::PushResourceUpdates( (aResources.*method)(aImageKeys[1], descriptor1, aExtID, imageType, 1); break; } - case gfx::SurfaceFormat::P010: { - MOZ_ASSERT(aImageKeys.length() == 2); - MOZ_ASSERT(mSurface->GetPlaneCount() == 2); - wr::ImageDescriptor descriptor0( - gfx::IntSize(mSurface->GetDevicePixelWidth(0), - mSurface->GetDevicePixelHeight(0)), - gfx::SurfaceFormat::A16); - wr::ImageDescriptor descriptor1( - gfx::IntSize(mSurface->GetDevicePixelWidth(1), - mSurface->GetDevicePixelHeight(1)), - gfx::SurfaceFormat::R16G16); - (aResources.*method)(aImageKeys[0], descriptor0, aExtID, imageType, 0); - (aResources.*method)(aImageKeys[1], descriptor1, aExtID, imageType, 1); - break; - } default: { MOZ_ASSERT_UNREACHABLE("unexpected to be called"); } @@ -207,15 +191,14 @@ void MacIOSurfaceTextureHostOGL::PushDisplayItems( /* aSupportsExternalCompositing */ true); break; } - case gfx::SurfaceFormat::NV12: - case gfx::SurfaceFormat::P010: { + case gfx::SurfaceFormat::NV12: { MOZ_ASSERT(aImageKeys.length() == 2); MOZ_ASSERT(mSurface->GetPlaneCount() == 2); + // Those images can only be generated at present by the Apple H264 decoder + // which only supports 8 bits color depth. aBuilder.PushNV12Image( aBounds, aClip, true, aImageKeys[0], aImageKeys[1], - GetFormat() == gfx::SurfaceFormat::NV12 ? 
wr::ColorDepth::Color8 - : wr::ColorDepth::Color10, - wr::ToWrYuvColorSpace(GetYUVColorSpace()), + wr::ColorDepth::Color8, wr::ToWrYuvColorSpace(GetYUVColorSpace()), wr::ToWrColorRange(GetColorRange()), aFilter, preferCompositorSurface, /* aSupportsExternalCompositing */ true); break; diff --git a/gfx/wr/webrender_api/src/image.rs b/gfx/wr/webrender_api/src/image.rs index 8d86ef1d7b88..e03a5d88ae18 100644 --- a/gfx/wr/webrender_api/src/image.rs +++ b/gfx/wr/webrender_api/src/image.rs @@ -160,7 +160,7 @@ pub enum ImageFormat { /// Two-channels, byte storage. Similar to `R8`, this just means /// "two channels" rather than "red and green". RG8 = 5, - /// Two-channels, short storage. Similar to `R16`, this just means + /// Two-channels, short storage. Similar to `R16`, this just means /// "two channels" rather than "red and green". RG16 = 6, diff --git a/widget/cocoa/SDKDeclarations.h b/widget/cocoa/SDKDeclarations.h index b06409358201..dca3ff66bc94 100644 --- a/widget/cocoa/SDKDeclarations.h +++ b/widget/cocoa/SDKDeclarations.h @@ -36,11 +36,6 @@ using NSAppearanceName = NSString*; - (BOOL)launchAndReturnError:(NSError**)error NS_AVAILABLE_MAC(10_13); @end -enum : OSType { - kCVPixelFormatType_420YpCbCr10BiPlanarVideoRange = 'x420', - kCVPixelFormatType_420YpCbCr10BiPlanarFullRange = 'xf20', -}; - #endif #if !defined(MAC_OS_X_VERSION_10_14) || MAC_OS_X_VERSION_MAX_ALLOWED < MAC_OS_X_VERSION_10_14