Bug 1657107 - Use YUV422 format for uploading videos to IOSurfaces instead of NV12. r=mstange

Differential Revision: https://phabricator.services.mozilla.com/D86081
This commit is contained in:
Matt Woodrow 2020-08-06 21:27:46 +00:00
Parent 172e5c81ec
Commit a679ee84cf
9 changed files: 145 additions and 49 deletions

View file

@ -351,12 +351,15 @@ already_AddRefed<VideoData> VideoData::CreateAndCopyData(
}
}
#elif XP_MACOSX
RefPtr<layers::MacIOSurfaceImage> ioImage =
new layers::MacIOSurfaceImage(nullptr);
PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
if (ioImage->SetData(aContainer, data)) {
v->mImage = ioImage;
return v.forget();
if (aAllocator && aAllocator->GetCompositorBackendType() ==
layers::LayersBackend::LAYERS_WR) {
RefPtr<layers::MacIOSurfaceImage> ioImage =
new layers::MacIOSurfaceImage(nullptr);
PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
if (ioImage->SetData(aContainer, data)) {
v->mImage = ioImage;
return v.forget();
}
}
#endif
if (!v->mImage) {

View file

@ -40,9 +40,9 @@ nsresult AppleDecoderModule::Startup() {
already_AddRefed<MediaDataDecoder> AppleDecoderModule::CreateVideoDecoder(
const CreateDecoderParams& aParams) {
RefPtr<MediaDataDecoder> decoder =
new AppleVTDecoder(aParams.VideoConfig(), aParams.mTaskQueue,
aParams.mImageContainer, aParams.mOptions);
RefPtr<MediaDataDecoder> decoder = new AppleVTDecoder(
aParams.VideoConfig(), aParams.mTaskQueue, aParams.mImageContainer,
aParams.mOptions, aParams.mKnowsCompositor);
return decoder.forget();
}

View file

@ -31,7 +31,8 @@ using namespace layers;
AppleVTDecoder::AppleVTDecoder(const VideoInfo& aConfig, TaskQueue* aTaskQueue,
layers::ImageContainer* aImageContainer,
CreateDecoderParams::OptionSet aOptions)
CreateDecoderParams::OptionSet aOptions,
layers::KnowsCompositor* aKnowsCompositor)
: mExtraData(aConfig.mExtraData),
mPictureWidth(aConfig.mImage.width),
mPictureHeight(aConfig.mImage.height),
@ -45,7 +46,8 @@ AppleVTDecoder::AppleVTDecoder(const VideoInfo& aConfig, TaskQueue* aTaskQueue,
mMaxRefFrames(aOptions.contains(CreateDecoderParams::Option::LowLatency)
? 0
: H264::ComputeMaxRefFrames(aConfig.mExtraData)),
mImageContainer(aImageContainer)
mImageContainer(aImageContainer),
mKnowsCompositor(aKnowsCompositor)
#ifdef MOZ_WIDGET_UIKIT
,
mUseSoftwareImages(true)
@ -409,7 +411,8 @@ void AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
data = VideoData::CreateAndCopyData(
info, mImageContainer, aFrameRef.byte_offset,
aFrameRef.composition_timestamp, aFrameRef.duration, buffer,
aFrameRef.is_sync_point, aFrameRef.decode_timestamp, visible);
aFrameRef.is_sync_point, aFrameRef.decode_timestamp, visible,
mKnowsCompositor);
// Unlock the returned image data.
CVPixelBufferUnlockBaseAddress(aImage, kCVPixelBufferLock_ReadOnly);
} else {

View file

@ -25,7 +25,8 @@ class AppleVTDecoder : public MediaDataDecoder,
public:
AppleVTDecoder(const VideoInfo& aConfig, TaskQueue* aTaskQueue,
layers::ImageContainer* aImageContainer,
CreateDecoderParams::OptionSet aOptions);
CreateDecoderParams::OptionSet aOptions,
layers::KnowsCompositor* aKnowsCompositor);
class AppleFrameRef {
public:
@ -100,6 +101,7 @@ class AppleVTDecoder : public MediaDataDecoder,
const RefPtr<TaskQueue> mTaskQueue;
const uint32_t mMaxRefFrames;
const RefPtr<layers::ImageContainer> mImageContainer;
const RefPtr<layers::KnowsCompositor> mKnowsCompositor;
const bool mUseSoftwareImages;
// Set on reader/decode thread calling Flush() to indicate that output is

View file

@ -181,6 +181,71 @@ already_AddRefed<MacIOSurface> MacIOSurface::CreateNV12Surface(
return ioSurface.forget();
}
/* static */
already_AddRefed<MacIOSurface> MacIOSurface::CreateYUV422Surface(
    const IntSize& aSize, YUVColorSpace aColorSpace, ColorRange aColorRange) {
  // Allocates a single-plane, 16-bits-per-pixel 4:2:2 IOSurface (interleaved
  // as Y`0 Cb Y`1 Cr) that can be handed directly to CoreAnimation.
  // Returns nullptr if the property dictionary or the IOSurface itself
  // cannot be created.
  MOZ_ASSERT(aColorSpace == YUVColorSpace::BT601 ||
             aColorSpace == YUVColorSpace::BT709);
  MOZ_ASSERT(aColorRange == ColorRange::LIMITED ||
             aColorRange == ColorRange::FULL);

  // Use a capacity of 0 (unlimited): we insert five entries below, and
  // CFDictionaryCreateMutable documents a non-zero capacity as a hard
  // maximum, which the previous value of 4 exceeded.
  auto props = CFTypeRefPtr<CFMutableDictionaryRef>::WrapUnderCreateRule(
      ::CFDictionaryCreateMutable(kCFAllocatorDefault, 0,
                                  &kCFTypeDictionaryKeyCallBacks,
                                  &kCFTypeDictionaryValueCallBacks));
  if (!props) return nullptr;

  MOZ_ASSERT((size_t)aSize.width <= GetMaxWidth());
  MOZ_ASSERT((size_t)aSize.height <= GetMaxHeight());

  AddDictionaryInt(props, kIOSurfaceWidth, aSize.width);
  AddDictionaryInt(props, kIOSurfaceHeight, aSize.height);
  ::CFDictionaryAddValue(props.get(), kIOSurfaceIsGlobal, kCFBooleanTrue);
  // Two bytes per element: each element holds one Y` sample plus one Cb or
  // Cr sample of the interleaved 4:2:2 layout.
  AddDictionaryInt(props, kIOSurfaceBytesPerElement, 2);

  // '_yuvs' is the video-range (limited) variant of the 8-bit 4:2:2 format;
  // 'FullRange' ('yuvf') is the full-range one.
  if (aColorRange == ColorRange::LIMITED) {
    AddDictionaryInt(props, kIOSurfacePixelFormat,
                     (uint32_t)kCVPixelFormatType_422YpCbCr8_yuvs);
  } else {
    AddDictionaryInt(props, kIOSurfacePixelFormat,
                     (uint32_t)kCVPixelFormatType_422YpCbCr8FullRange);
  }

  CFTypeRefPtr<IOSurfaceRef> surfaceRef =
      CFTypeRefPtr<IOSurfaceRef>::WrapUnderCreateRule(
          ::IOSurfaceCreate(props.get()));
  if (!surfaceRef) {
    return nullptr;
  }

  // Setup the correct YCbCr conversion matrix on the IOSurface, in case we
  // pass this directly to CoreAnimation.
  if (aColorSpace == YUVColorSpace::BT601) {
    IOSurfaceSetValue(surfaceRef.get(), CFSTR("IOSurfaceYCbCrMatrix"),
                      CFSTR("ITU_R_601_4"));
  } else {
    IOSurfaceSetValue(surfaceRef.get(), CFSTR("IOSurfaceYCbCrMatrix"),
                      CFSTR("ITU_R_709_2"));
  }

  // Override the color space to be the same as the main display, so that
  // CoreAnimation won't try to do any color correction (from the IOSurface
  // space, to the display). In the future we may want to try specifying this
  // correctly, but probably only once we do the same for videos drawn through
  // our gfx code.
  auto colorSpace = CFTypeRefPtr<CGColorSpaceRef>::WrapUnderCreateRule(
      CGDisplayCopyColorSpace(CGMainDisplayID()));
  auto colorData = CFTypeRefPtr<CFDataRef>::WrapUnderCreateRule(
      CGColorSpaceCopyICCProfile(colorSpace.get()));
  // Guard against a null ICC blob (CGColorSpaceCopyICCProfile can fail for
  // some display color spaces) rather than handing IOSurfaceSetValue null.
  if (colorData) {
    IOSurfaceSetValue(surfaceRef.get(), CFSTR("IOSurfaceColorSpace"),
                      colorData.get());
  }

  RefPtr<MacIOSurface> ioSurface =
      new MacIOSurface(std::move(surfaceRef), 1.0, false, aColorSpace);
  return ioSurface.forget();
}
/* static */
already_AddRefed<MacIOSurface> MacIOSurface::LookupSurface(
IOSurfaceID aIOSurfaceID, double aContentsScaleFactor, bool aHasAlpha,
@ -328,7 +393,8 @@ SurfaceFormat MacIOSurface::GetFormat() const {
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
return SurfaceFormat::NV12;
case kCVPixelFormatType_422YpCbCr8:
case kCVPixelFormatType_422YpCbCr8_yuvs:
case kCVPixelFormatType_422YpCbCr8FullRange:
return SurfaceFormat::YUV422;
case kCVPixelFormatType_32BGRA:
return HasAlpha() ? SurfaceFormat::B8G8R8A8 : SurfaceFormat::B8G8R8X8;
@ -383,7 +449,8 @@ CGLError MacIOSurface::CGLTexImageIOSurface2D(
if (aOutReadFormat) {
*aOutReadFormat = mozilla::gfx::SurfaceFormat::NV12;
}
} else if (pixelFormat == kCVPixelFormatType_422YpCbCr8) {
} else if (pixelFormat == kCVPixelFormatType_422YpCbCr8_yuvs ||
pixelFormat == kCVPixelFormatType_422YpCbCr8FullRange) {
MOZ_ASSERT(plane == 0);
// The YCBCR_422_APPLE ext is only available in compatibility profile. So,
// we should use RGB_422_APPLE for core profile. The difference between
@ -406,7 +473,7 @@ CGLError MacIOSurface::CGLTexImageIOSurface2D(
}
}
internalFormat = LOCAL_GL_RGB;
type = LOCAL_GL_UNSIGNED_SHORT_8_8_APPLE;
type = LOCAL_GL_UNSIGNED_SHORT_8_8_REV_APPLE;
} else {
MOZ_ASSERT(plane == 0);

View file

@ -62,6 +62,8 @@ class MacIOSurface final
static already_AddRefed<MacIOSurface> CreateNV12Surface(
const IntSize& aYSize, const IntSize& aCbCrSize,
YUVColorSpace aColorSpace, ColorRange aColorRange);
static already_AddRefed<MacIOSurface> CreateYUV422Surface(
const IntSize& aSize, YUVColorSpace aColorSpace, ColorRange aColorRange);
static void ReleaseIOSurface(MacIOSurface* aIOSurface);
static already_AddRefed<MacIOSurface> LookupSurface(
IOSurfaceID aSurfaceID, double aContentsScaleFactor = 1.0,

View file

@ -72,14 +72,14 @@ enum class SurfaceFormat : int8_t {
// These ones are their own special cases.
YUV,
NV12, // YUV 4:2:0 image with a plane of 8 bit Y samples followed by
// an interleaved U/V plane containing 8 bit 2x2 subsampled
// colour difference samples.
P016, // Similar to NV12, but with 16 bits plane values
P010, // Identical to P016 but the 6 least significant bits are 0.
// With DXGI in theory entirely compatible, however practice has
// shown that it's not the case.
YUV422,
NV12, // YUV 4:2:0 image with a plane of 8 bit Y samples followed by
// an interleaved U/V plane containing 8 bit 2x2 subsampled
// colour difference samples.
P016, // Similar to NV12, but with 16 bits plane values
P010, // Identical to P016 but the 6 least significant bits are 0.
// With DXGI in theory entirely compatible, however practice has
// shown that it's not the case.
YUV422, // Single plane YUV 4:2:2 interleaved as Y`0 Cb Y`1 Cr.
HSV,
Lab,
Depth,

View file

@ -120,6 +120,9 @@ CreateSourceSurfaceFromLockedMacIOSurface(MacIOSurface* aSurface) {
for (size_t i = 0; i < ioHeight; i++) {
uint8_t* rowSrc = src + bytesPerRow * i;
for (size_t j = 0; j < cbCrWidth; j++) {
*yDest = *rowSrc;
yDest++;
rowSrc++;
*cbDest = *rowSrc;
cbDest++;
rowSrc++;
@ -129,9 +132,6 @@ CreateSourceSurfaceFromLockedMacIOSurface(MacIOSurface* aSurface) {
*crDest = *rowSrc;
crDest++;
rowSrc++;
*yDest = *rowSrc;
yDest++;
rowSrc++;
}
if (strideDelta) {
cbDest += strideDelta;

View file

@ -51,6 +51,16 @@ bool MacIOSurfaceImage::SetData(ImageContainer* aContainer,
return false;
}
if (aData.mCbCrSize.width * 2 != aData.mYSize.width) {
return false;
}
// We can only support 4:2:2 and 4:2:0 formats currently.
if (aData.mCbCrSize.height != aData.mYSize.height &&
aData.mCbCrSize.height * 2 != aData.mYSize.height) {
return false;
}
RefPtr<MacIOSurfaceRecycleAllocator> allocator =
aContainer->GetMacIOSurfaceRecycleAllocator();
@ -59,30 +69,44 @@ bool MacIOSurfaceImage::SetData(ImageContainer* aContainer,
surf->Lock(false);
// If the CbCrSize's height is half of the YSize's height, then we'll
// need to duplicate the CbCr data on every second row.
size_t heightScale = aData.mYSize.height / aData.mCbCrSize.height;
MOZ_ASSERT(surf->GetFormat() == SurfaceFormat::YUV422);
// The underlying IOSurface has format kCVPixelFormatType_422YpCbCr8FullRange
// or kCVPixelFormatType_422YpCbCr8_yuvs, which uses a 4:2:2 Y`0 Cb Y`1 Cr
// layout. See CVPixelBuffer.h for the full list of format descriptions.
MOZ_ASSERT(aData.mYSize.height > 0);
uint8_t* dst = (uint8_t*)surf->GetBaseAddressOfPlane(0);
size_t stride = surf->GetBytesPerRow(0);
for (size_t i = 0; i < (size_t)aData.mYSize.height; i++) {
uint8_t* rowSrc = aData.mYChannel + aData.mYStride * i;
uint8_t* rowDst = dst + stride * i;
memcpy(rowDst, rowSrc, aData.mYSize.width);
}
// CoreAnimation doesn't appear to support planar YCbCr formats, so we
// allocated an NV12 surface and now we need to copy and interleave the Cb and
// Cr channels.
MOZ_ASSERT(aData.mCbCrSize.height > 0);
dst = (uint8_t*)surf->GetBaseAddressOfPlane(1);
stride = surf->GetBytesPerRow(1);
for (size_t i = 0; i < (size_t)aData.mCbCrSize.height; i++) {
uint8_t* rowCbSrc = aData.mCbChannel + aData.mCbCrStride * i;
uint8_t* rowCrSrc = aData.mCrChannel + aData.mCbCrStride * i;
// Compute the row addresses. If the input was 4:2:0, then
// we divide i by 2, so that each source row of CbCr maps to
// two dest rows.
uint8_t* rowYSrc = aData.mYChannel + aData.mYStride * i;
uint8_t* rowCbSrc =
aData.mCbChannel + aData.mCbCrStride * (i / heightScale);
uint8_t* rowCrSrc =
aData.mCrChannel + aData.mCbCrStride * (i / heightScale);
uint8_t* rowDst = dst + stride * i;
// Iterate across the CbCr width (which we have guaranteed to be half of
// the surface width), and write two 16bit pixels each time.
for (size_t j = 0; j < (size_t)aData.mCbCrSize.width; j++) {
*rowDst = *rowYSrc;
rowDst++;
rowYSrc++;
*rowDst = *rowCbSrc;
rowDst++;
rowCbSrc++;
*rowDst = *rowYSrc;
rowDst++;
rowYSrc++;
*rowDst = *rowCrSrc;
rowDst++;
rowCrSrc++;
@ -101,15 +125,10 @@ already_AddRefed<MacIOSurface> MacIOSurfaceRecycleAllocator::Allocate(
nsTArray<CFTypeRefPtr<IOSurfaceRef>> surfaces = std::move(mSurfaces);
RefPtr<MacIOSurface> result;
for (auto& surf : surfaces) {
MOZ_ASSERT(::IOSurfaceGetPlaneCount(surf.get()) == 2);
// If the surface size has changed, then discard any surfaces of the old
// size.
if (::IOSurfaceGetWidthOfPlane(surf.get(), 0) != (size_t)aYSize.width ||
::IOSurfaceGetHeightOfPlane(surf.get(), 0) != (size_t)aYSize.height ||
::IOSurfaceGetWidthOfPlane(surf.get(), 1) != (size_t)aCbCrSize.width ||
::IOSurfaceGetHeightOfPlane(surf.get(), 1) !=
(size_t)aCbCrSize.height) {
::IOSurfaceGetHeightOfPlane(surf.get(), 0) != (size_t)aYSize.height) {
continue;
}
@ -123,8 +142,8 @@ already_AddRefed<MacIOSurface> MacIOSurfaceRecycleAllocator::Allocate(
}
if (!result) {
result = MacIOSurface::CreateNV12Surface(aYSize, aCbCrSize, aYUVColorSpace,
aColorRange);
result =
MacIOSurface::CreateYUV422Surface(aYSize, aYUVColorSpace, aColorRange);
if (mSurfaces.Length() <
StaticPrefs::layers_iosurfaceimage_recycle_limit()) {