Bug 1757067 - Explicitly specify chroma subsampling scheme for YCbCr data. r=jgilbert,jrmuizel

Differential Revision: https://phabricator.services.mozilla.com/D139656
This commit is contained in:
Lee Salzman 2022-03-10 07:48:53 +00:00
Родитель 21044f01dd
Коммит a664a6d305
59 изменённых файлов: 545 добавлений и 543 удалений

Просмотреть файл

@ -53,46 +53,42 @@ static ImageBitmapFormat GetImageBitmapFormatFromPlanarYCbCrData(
layers::PlanarYCbCrData const* aData) {
MOZ_ASSERT(aData);
auto ySize = aData->YDataSize();
auto cbcrSize = aData->CbCrDataSize();
media::Interval<uintptr_t> YInterval(
uintptr_t(aData->mYChannel),
uintptr_t(aData->mYChannel) + aData->mYSize.height * aData->mYStride),
CbInterval(uintptr_t(aData->mCbChannel),
uintptr_t(aData->mCbChannel) +
aData->mCbCrSize.height * aData->mCbCrStride),
CrInterval(uintptr_t(aData->mCrChannel),
uintptr_t(aData->mCrChannel) +
aData->mCbCrSize.height * aData->mCbCrStride);
uintptr_t(aData->mYChannel) + ySize.height * aData->mYStride),
CbInterval(
uintptr_t(aData->mCbChannel),
uintptr_t(aData->mCbChannel) + cbcrSize.height * aData->mCbCrStride),
CrInterval(
uintptr_t(aData->mCrChannel),
uintptr_t(aData->mCrChannel) + cbcrSize.height * aData->mCbCrStride);
if (aData->mYSkip == 0 && aData->mCbSkip == 0 &&
aData->mCrSkip == 0) { // Possibly three planes.
if (!YInterval.Intersects(CbInterval) &&
!CbInterval.Intersects(CrInterval)) { // Three planes.
if (aData->mYSize.height == aData->mCbCrSize.height) {
if (aData->mYSize.width == aData->mCbCrSize.width) {
switch (aData->mChromaSubsampling) {
case ChromaSubsampling::FULL:
return ImageBitmapFormat::YUV444P;
}
if (((aData->mYSize.width + 1) / 2) == aData->mCbCrSize.width) {
case ChromaSubsampling::HALF_WIDTH:
return ImageBitmapFormat::YUV422P;
}
} else if (((aData->mYSize.height + 1) / 2) == aData->mCbCrSize.height) {
if (((aData->mYSize.width + 1) / 2) == aData->mCbCrSize.width) {
case ChromaSubsampling::HALF_WIDTH_AND_HEIGHT:
return ImageBitmapFormat::YUV420P;
}
default:
break;
}
}
} else if (aData->mYSkip == 0 && aData->mCbSkip == 1 &&
aData->mCrSkip == 1) { // Possibly two planes.
} else if (aData->mYSkip == 0 && aData->mCbSkip == 1 && aData->mCrSkip == 1 &&
aData->mChromaSubsampling ==
ChromaSubsampling::HALF_WIDTH_AND_HEIGHT) { // Possibly two
// planes.
if (!YInterval.Intersects(CbInterval) &&
aData->mCbChannel == aData->mCrChannel - 1) { // Two planes.
if (((aData->mYSize.height + 1) / 2) == aData->mCbCrSize.height &&
((aData->mYSize.width + 1) / 2) == aData->mCbCrSize.width) {
return ImageBitmapFormat::YUV420SP_NV12; // Y-Cb-Cr
}
return ImageBitmapFormat::YUV420SP_NV12; // Y-Cb-Cr
} else if (!YInterval.Intersects(CrInterval) &&
aData->mCrChannel == aData->mCbChannel - 1) { // Two planes.
if (((aData->mYSize.height + 1) / 2) == aData->mCbCrSize.height &&
((aData->mYSize.width + 1) / 2) == aData->mCbCrSize.width) {
return ImageBitmapFormat::YUV420SP_NV21; // Y-Cr-Cb
}
return ImageBitmapFormat::YUV420SP_NV21; // Y-Cr-Cb
}
}

Просмотреть файл

@ -262,22 +262,19 @@ PlanarYCbCrData ConstructPlanarYCbCrData(const VideoInfo& aInfo,
PlanarYCbCrData data;
data.mYChannel = Y.mData;
data.mYSize = IntSize(Y.mWidth, Y.mHeight);
data.mYStride = Y.mStride;
data.mYSkip = Y.mSkip;
data.mCbChannel = Cb.mData;
data.mCrChannel = Cr.mData;
data.mCbCrSize = IntSize(Cb.mWidth, Cb.mHeight);
data.mCbCrStride = Cb.mStride;
data.mCbSkip = Cb.mSkip;
data.mCrSkip = Cr.mSkip;
data.mPicX = aPicture.x;
data.mPicY = aPicture.y;
data.mPicSize = aPicture.Size();
data.mPictureRect = aPicture;
data.mStereoMode = aInfo.mStereoMode;
data.mYUVColorSpace = aBuffer.mYUVColorSpace;
data.mColorDepth = aBuffer.mColorDepth;
data.mColorRange = aBuffer.mColorRange;
data.mChromaSubsampling = aBuffer.mChromaSubsampling;
return data;
}

Просмотреть файл

@ -421,6 +421,7 @@ class VideoData : public MediaData {
typedef gfx::ColorDepth ColorDepth;
typedef gfx::ColorRange ColorRange;
typedef gfx::YUVColorSpace YUVColorSpace;
typedef gfx::ChromaSubsampling ChromaSubsampling;
typedef layers::ImageContainer ImageContainer;
typedef layers::Image Image;
typedef layers::PlanarYCbCrImage PlanarYCbCrImage;
@ -445,6 +446,7 @@ class VideoData : public MediaData {
YUVColorSpace mYUVColorSpace = YUVColorSpace::Identity;
ColorDepth mColorDepth = ColorDepth::COLOR_8;
ColorRange mColorRange = ColorRange::LIMITED;
ChromaSubsampling mChromaSubsampling = ChromaSubsampling::FULL;
};
// Constructs a VideoData object. If aImage is nullptr, creates a new Image

Просмотреть файл

@ -19,7 +19,7 @@ static bool SetImageToBlackPixel(layers::PlanarYCbCrImage* aImage) {
data.mCbChannel = blackPixel + 1;
data.mCrChannel = blackPixel + 2;
data.mYStride = data.mCbCrStride = 1;
data.mPicSize = data.mYSize = data.mCbCrSize = gfx::IntSize(1, 1);
data.mPictureRect = gfx::IntRect(0, 0, 1, 1);
data.mYUVColorSpace = gfx::YUVColorSpace::BT601;
// This could be made FULL once bug 1568745 is complete. A black pixel being
// 0x00, 0x80, 0x80

Просмотреть файл

@ -52,35 +52,30 @@ already_AddRefed<Image> VideoFrame::CreateBlackImage(
return nullptr;
}
int len = ((aSize.width * aSize.height) * 3 / 2);
gfx::IntSize cbcrSize((aSize.width + 1) / 2, (aSize.height + 1) / 2);
int yLen = aSize.width * aSize.height;
int cbcrLen = cbcrSize.width * cbcrSize.height;
// Generate a black image.
auto frame = MakeUnique<uint8_t[]>(len);
int y = aSize.width * aSize.height;
auto frame = MakeUnique<uint8_t[]>(yLen + 2 * cbcrLen);
// Fill Y plane.
memset(frame.get(), 0x10, y);
memset(frame.get(), 0x10, yLen);
// Fill Cb/Cr planes.
memset(frame.get() + y, 0x80, (len - y));
const uint8_t lumaBpp = 8;
const uint8_t chromaBpp = 4;
memset(frame.get() + yLen, 0x80, 2 * cbcrLen);
layers::PlanarYCbCrData data;
data.mYChannel = frame.get();
data.mYSize = gfx::IntSize(aSize.width, aSize.height);
data.mYStride = (int32_t)(aSize.width * lumaBpp / 8.0);
data.mCbCrStride = (int32_t)(aSize.width * chromaBpp / 8.0);
data.mCbChannel = frame.get() + aSize.height * data.mYStride;
data.mCrChannel = data.mCbChannel + aSize.height * data.mCbCrStride / 2;
data.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = gfx::IntSize(aSize.width, aSize.height);
data.mYStride = aSize.width;
data.mCbCrStride = cbcrSize.width;
data.mCbChannel = frame.get() + yLen;
data.mCrChannel = data.mCbChannel + cbcrLen;
data.mPictureRect = gfx::IntRect(0, 0, aSize.width, aSize.height);
data.mStereoMode = StereoMode::MONO;
data.mYUVColorSpace = gfx::YUVColorSpace::BT601;
// This could be made FULL once bug 1568745 is complete. A black pixel being
// 0x00, 0x80, 0x80
data.mColorRange = gfx::ColorRange::LIMITED;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
// Copies data, so we can free data.
if (!image->CopyData(data)) {

Просмотреть файл

@ -960,6 +960,8 @@ already_AddRefed<VideoData> ChromiumCDMParent::CreateVideoFrame(
b.mPlanes[2].mStride = aFrame.mVPlane().mStride();
b.mPlanes[2].mSkip = 0;
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
// We unfortunately can't know which colorspace the video is using at this
// stage.
b.mYUVColorSpace =

Просмотреть файл

@ -56,20 +56,20 @@ class MediaDataEncoderTest : public testing::Test {
int16_t mColorStep = 4;
void Init(const gfx::IntSize& aSize) {
mYUV.mPicSize = aSize;
mYUV.mPictureRect = gfx::IntRect(0, 0, aSize.width, aSize.height);
mYUV.mYStride = aSize.width;
mYUV.mYSize = aSize;
mYUV.mCbCrStride = aSize.width / 2;
mYUV.mCbCrSize = gfx::IntSize(aSize.width / 2, aSize.height / 2);
size_t bufferSize = mYUV.mYStride * mYUV.mYSize.height +
mYUV.mCbCrStride * mYUV.mCbCrSize.height +
mYUV.mCbCrStride * mYUV.mCbCrSize.height;
mYUV.mCbCrStride = (aSize.width + 1) / 2;
mYUV.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
auto ySize = mYUV.YDataSize();
auto cbcrSize = mYUV.CbCrDataSize();
size_t bufferSize =
mYUV.mYStride * ySize.height + 2 * mYUV.mCbCrStride * cbcrSize.height;
mBuffer = MakeUnique<uint8_t[]>(bufferSize);
std::fill_n(mBuffer.get(), bufferSize, 0x7F);
mYUV.mYChannel = mBuffer.get();
mYUV.mCbChannel = mYUV.mYChannel + mYUV.mYStride * mYUV.mYSize.height;
mYUV.mCrChannel =
mYUV.mCbChannel + mYUV.mCbCrStride * mYUV.mCbCrSize.height;
mYUV.mCbChannel = mYUV.mYChannel + mYUV.mYStride * ySize.height;
mYUV.mCrChannel = mYUV.mCbChannel + mYUV.mCbCrStride * cbcrSize.height;
mYUV.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
mRecycleBin = new layers::BufferRecycleBin();
}
@ -119,8 +119,8 @@ class MediaDataEncoderTest : public testing::Test {
}
void Draw(const size_t aIndex) {
DrawChessboard(mYUV.mYChannel, mYUV.mYSize.width, mYUV.mYSize.height,
aIndex << 1);
auto ySize = mYUV.YDataSize();
DrawChessboard(mYUV.mYChannel, ySize.width, ySize.height, aIndex << 1);
int16_t color = mYUV.mCbChannel[0] + mColorStep;
if (color > 255 || color < 0) {
mColorStep = -mColorStep;

Просмотреть файл

@ -46,7 +46,7 @@ Image* YUVBufferGenerator::CreateI420Image() {
PlanarYCbCrImage* image =
new RecyclingPlanarYCbCrImage(new BufferRecycleBin());
PlanarYCbCrData data;
data.mPicSize = mImageSize;
data.mPictureRect = gfx::IntRect(0, 0, mImageSize.width, mImageSize.height);
const uint32_t yPlaneSize = mImageSize.width * mImageSize.height;
const uint32_t halfWidth = (mImageSize.width + 1) / 2;
@ -56,8 +56,6 @@ Image* YUVBufferGenerator::CreateI420Image() {
// Y plane.
uint8_t* y = mSourceBuffer.Elements();
data.mYChannel = y;
data.mYSize.width = mImageSize.width;
data.mYSize.height = mImageSize.height;
data.mYStride = mImageSize.width;
data.mYSkip = 0;
@ -73,10 +71,9 @@ Image* YUVBufferGenerator::CreateI420Image() {
// CrCb plane vectors.
data.mCbCrStride = halfWidth;
data.mCbCrSize.width = halfWidth;
data.mCbCrSize.height = halfHeight;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
data.mYUVColorSpace = DefaultColorSpace(data.mYSize);
data.mYUVColorSpace = DefaultColorSpace(mImageSize);
image->CopyData(data);
return image;
@ -85,17 +82,13 @@ Image* YUVBufferGenerator::CreateI420Image() {
Image* YUVBufferGenerator::CreateNV12Image() {
NVImage* image = new NVImage();
PlanarYCbCrData data;
data.mPicSize = mImageSize;
data.mPictureRect = gfx::IntRect(0, 0, mImageSize.width, mImageSize.height);
const uint32_t yPlaneSize = mImageSize.width * mImageSize.height;
const uint32_t halfWidth = (mImageSize.width + 1) / 2;
const uint32_t halfHeight = (mImageSize.height + 1) / 2;
// Y plane.
uint8_t* y = mSourceBuffer.Elements();
data.mYChannel = y;
data.mYSize.width = mImageSize.width;
data.mYSize.height = mImageSize.height;
data.mYStride = mImageSize.width;
data.mYSkip = 0;
@ -111,8 +104,7 @@ Image* YUVBufferGenerator::CreateNV12Image() {
// 4:2:0.
data.mCbCrStride = mImageSize.width;
data.mCbCrSize.width = halfWidth;
data.mCbCrSize.height = halfHeight;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
image->SetData(data);
return image;
@ -121,17 +113,13 @@ Image* YUVBufferGenerator::CreateNV12Image() {
Image* YUVBufferGenerator::CreateNV21Image() {
NVImage* image = new NVImage();
PlanarYCbCrData data;
data.mPicSize = mImageSize;
data.mPictureRect = gfx::IntRect(0, 0, mImageSize.width, mImageSize.height);
const uint32_t yPlaneSize = mImageSize.width * mImageSize.height;
const uint32_t halfWidth = (mImageSize.width + 1) / 2;
const uint32_t halfHeight = (mImageSize.height + 1) / 2;
// Y plane.
uint8_t* y = mSourceBuffer.Elements();
data.mYChannel = y;
data.mYSize.width = mImageSize.width;
data.mYSize.height = mImageSize.height;
data.mYStride = mImageSize.width;
data.mYSkip = 0;
@ -147,10 +135,9 @@ Image* YUVBufferGenerator::CreateNV21Image() {
// 4:2:0.
data.mCbCrStride = mImageSize.width;
data.mCbCrSize.width = halfWidth;
data.mCbCrSize.height = halfHeight;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
data.mYUVColorSpace = DefaultColorSpace(data.mYSize);
data.mYUVColorSpace = DefaultColorSpace(mImageSize);
image->SetData(data);
return image;

Просмотреть файл

@ -59,20 +59,16 @@ already_AddRefed<Image> RemoteImageHolder::DeserializeImage(
}
PlanarYCbCrData pData;
pData.mYSize = descriptor.ySize();
pData.mYStride = descriptor.yStride();
pData.mCbCrSize = descriptor.cbCrSize();
pData.mCbCrStride = descriptor.cbCrStride();
// default mYSkip, mCbSkip, mCrSkip because not held in YCbCrDescriptor
pData.mYSkip = pData.mCbSkip = pData.mCrSkip = 0;
gfx::IntRect display = descriptor.display();
pData.mPicX = display.X();
pData.mPicY = display.Y();
pData.mPicSize = display.Size();
pData.mPictureRect = descriptor.display();
pData.mStereoMode = descriptor.stereoMode();
pData.mColorDepth = descriptor.colorDepth();
pData.mYUVColorSpace = descriptor.yUVColorSpace();
pData.mColorRange = descriptor.colorRange();
pData.mChromaSubsampling = descriptor.chromaSubsampling();
pData.mYChannel = ImageDataSerializer::GetYChannel(buffer, descriptor);
pData.mCbChannel = ImageDataSerializer::GetCbChannel(buffer, descriptor);
pData.mCrChannel = ImageDataSerializer::GetCrChannel(buffer, descriptor);

Просмотреть файл

@ -48,7 +48,7 @@ static void SetImageToGreenPixel(PlanarYCbCrImage* aImage) {
data.mCbChannel = greenPixel + 1;
data.mCrChannel = greenPixel + 2;
data.mYStride = data.mCbCrStride = 1;
data.mPicSize = data.mYSize = data.mCbCrSize = gfx::IntSize(1, 1);
data.mPictureRect = gfx::IntRect(0, 0, 1, 1);
data.mYUVColorSpace = gfx::YUVColorSpace::BT601;
aImage->CopyData(data);
}

Просмотреть файл

@ -167,6 +167,8 @@ RefPtr<MediaDataDecoder::DecodePromise> AOMDecoder::ProcessDecode(
b.mPlanes[2].mSkip = 0;
if (img->fmt == AOM_IMG_FMT_I420 || img->fmt == AOM_IMG_FMT_I42016) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;

Просмотреть файл

@ -67,6 +67,7 @@ already_AddRefed<MediaData> BlankVideoDataCreator::Create(
buffer.mPlanes[2].mWidth = (mFrameWidth + 1) / 2;
buffer.mPlanes[2].mSkip = 0;
buffer.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
buffer.mYUVColorSpace = gfx::YUVColorSpace::BT601;
return VideoData::CreateAndCopyData(mInfo, mImageContainer, aSample->mOffset,

Просмотреть файл

@ -237,6 +237,12 @@ already_AddRefed<VideoData> DAV1DDecoder::ConstructImage(
b.mPlanes[2].mHeight = (aPicture.p.h + ss_ver) >> ss_ver;
b.mPlanes[2].mWidth = (aPicture.p.w + ss_hor) >> ss_hor;
if (ss_ver) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
} else if (ss_hor) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
}
// Timestamp, duration and offset used here are wrong.
// We need to take those values from the decoder. Latest
// dav1d version allows for that.

Просмотреть файл

@ -171,6 +171,12 @@ RefPtr<MediaDataDecoder::DecodePromise> TheoraDecoder::ProcessDecode(
b.mPlanes[2].mWidth = mTheoraInfo.frame_width >> hdec;
b.mPlanes[2].mSkip = 0;
if (vdec) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
} else if (hdec) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
}
b.mYUVColorSpace =
DefaultColorSpace({mTheoraInfo.frame_width, mTheoraInfo.frame_height});

Просмотреть файл

@ -164,6 +164,8 @@ RefPtr<MediaDataDecoder::DecodePromise> VPXDecoder::ProcessDecode(
b.mPlanes[2].mSkip = 0;
if (img->fmt == VPX_IMG_FMT_I420) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;

Просмотреть файл

@ -56,6 +56,7 @@ void GMPVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame) {
b.mPlanes[i].mSkip = 0;
}
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
b.mYUVColorSpace =
DefaultColorSpace({decodedFrame->Width(), decodedFrame->Height()});

Просмотреть файл

@ -172,20 +172,22 @@ static jni::ByteBuffer::LocalRef ConvertI420ToNV12Buffer(
const PlanarYCbCrImage* image = aSample->mImage->AsPlanarYCbCrImage();
MOZ_ASSERT(image);
const PlanarYCbCrData* yuv = image->GetData();
size_t ySize = yuv->mYStride * yuv->mYSize.height;
size_t size = ySize + (yuv->mCbCrStride * yuv->mCbCrSize.height * 2);
if (!aYUVBuffer || aYUVBuffer->Capacity() < size) {
aYUVBuffer = MakeRefPtr<MediaByteBuffer>(size);
aYUVBuffer->SetLength(size);
auto ySize = yuv->YDataSize();
auto cbcrSize = yuv->CbCrDataSize();
size_t yLength = yuv->mYStride * ySize.height;
size_t length = yLength + (yuv->mCbCrStride * cbcrSize.height * 2);
if (!aYUVBuffer || aYUVBuffer->Capacity() < length) {
aYUVBuffer = MakeRefPtr<MediaByteBuffer>(length);
aYUVBuffer->SetLength(length);
} else {
MOZ_ASSERT(aYUVBuffer->Length() >= size);
MOZ_ASSERT(aYUVBuffer->Length() >= length);
}
if (libyuv::I420ToNV12(yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
aYUVBuffer->Elements(), yuv->mYStride,
aYUVBuffer->Elements() + ySize, yuv->mCbCrStride * 2,
yuv->mYSize.width, yuv->mYSize.height) != 0) {
aYUVBuffer->Elements() + yLength, yuv->mCbCrStride * 2,
ySize.width, ySize.height) != 0) {
return nullptr;
}

Просмотреть файл

@ -412,6 +412,7 @@ void AppleVTDecoder::OutputFrame(CVPixelBufferRef aImage,
buffer.mPlanes[2].mHeight = (height + 1) / 2;
buffer.mPlanes[2].mSkip = 0;
buffer.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
buffer.mYUVColorSpace = mColorSpace;
buffer.mColorRange = mColorRange;

Просмотреть файл

@ -518,6 +518,11 @@ CVPixelBufferRef AppleVTEncoder::CreateCVPixelBuffer(const Image* aSource) {
OSType format = MapPixelFormat(mConfig.mSourcePixelFormat).ref();
size_t numPlanes = NumberOfPlanes(mConfig.mSourcePixelFormat);
const PlanarYCbCrImage::Data* yuv = image->GetData();
if (!yuv) {
return nullptr;
}
auto ySize = yuv->YDataSize();
auto cbcrSize = yuv->CbCrDataSize();
void* addresses[3] = {};
size_t widths[3] = {};
size_t heights[3] = {};
@ -525,20 +530,20 @@ CVPixelBufferRef AppleVTEncoder::CreateCVPixelBuffer(const Image* aSource) {
switch (numPlanes) {
case 3:
addresses[2] = yuv->mCrChannel;
widths[2] = yuv->mCbCrSize.width;
heights[2] = yuv->mCbCrSize.height;
widths[2] = cbcrSize.width;
heights[2] = cbcrSize.height;
strides[2] = yuv->mCbCrStride;
[[fallthrough]];
case 2:
addresses[1] = yuv->mCbChannel;
widths[1] = yuv->mCbCrSize.width;
heights[1] = yuv->mCbCrSize.height;
widths[1] = cbcrSize.width;
heights[1] = cbcrSize.height;
strides[1] = yuv->mCbCrStride;
[[fallthrough]];
case 1:
addresses[0] = yuv->mYChannel;
widths[0] = yuv->mYSize.width;
heights[0] = yuv->mYSize.height;
widths[0] = ySize.width;
heights[0] = ySize.height;
strides[0] = yuv->mYStride;
break;
default:
@ -548,9 +553,9 @@ CVPixelBufferRef AppleVTEncoder::CreateCVPixelBuffer(const Image* aSource) {
CVPixelBufferRef buffer = nullptr;
image->AddRef(); // Grip input buffers.
CVReturn rv = CVPixelBufferCreateWithPlanarBytes(
kCFAllocatorDefault, yuv->mPicSize.width, yuv->mPicSize.height, format,
nullptr /* dataPtr */, 0 /* dataSize */, numPlanes, addresses, widths,
heights, strides, ReleaseImage /* releaseCallback */,
kCFAllocatorDefault, yuv->mPictureRect.width, yuv->mPictureRect.height,
format, nullptr /* dataPtr */, 0 /* dataSize */, numPlanes, addresses,
widths, heights, strides, ReleaseImage /* releaseCallback */,
image /* releaseRefCon */, nullptr /* pixelBufferAttributes */, &buffer);
if (rv == kCVReturnSuccess) {
return buffer;

Просмотреть файл

@ -501,20 +501,8 @@ static bool IsYUV420Sampling(const AVPixelFormat& aFormat) {
}
layers::TextureClient*
FFmpegVideoDecoder<LIBAV_VER>::AllocateTextueClientForImage(
FFmpegVideoDecoder<LIBAV_VER>::AllocateTextureClientForImage(
struct AVCodecContext* aCodecContext, PlanarYCbCrImage* aImage) {
layers::PlanarYCbCrData data =
CreateEmptyPlanarYCbCrData(aCodecContext, mInfo);
// Allocate a shmem buffer for image.
if (!aImage->CreateEmptyBuffer(data)) {
return nullptr;
}
return aImage->GetTextureClient(mImageAllocator);
}
layers::PlanarYCbCrData
FFmpegVideoDecoder<LIBAV_VER>::CreateEmptyPlanarYCbCrData(
struct AVCodecContext* aCodecContext, const VideoInfo& aInfo) {
MOZ_ASSERT(
IsColorFormatSupportedForUsingCustomizedBuffer(aCodecContext->pix_fmt));
@ -544,42 +532,39 @@ FFmpegVideoDecoder<LIBAV_VER>::CreateEmptyPlanarYCbCrData(
// plane Cb/Cr
// width 1280 height 720 -> adjusted-width 1280 adjusted-height 736
layers::PlanarYCbCrData data;
auto paddedYSize =
const auto yDims =
gfx::IntSize{aCodecContext->coded_width, aCodecContext->coded_height};
auto paddedYSize = yDims;
mLib->avcodec_align_dimensions(aCodecContext, &paddedYSize.width,
&paddedYSize.height);
data.mYSize = gfx::IntSize{paddedYSize.Width(), paddedYSize.Height()};
data.mYStride = data.mYSize.Width() * bytesPerChannel;
data.mYStride = paddedYSize.Width() * bytesPerChannel;
MOZ_ASSERT(
IsColorFormatSupportedForUsingCustomizedBuffer(aCodecContext->pix_fmt));
const auto yDims =
gfx::IntSize{aCodecContext->coded_width, aCodecContext->coded_height};
auto uvDims = yDims;
if (IsYUV420Sampling(aCodecContext->pix_fmt)) {
uvDims.width = (uvDims.width + 1) / 2;
uvDims.height = (uvDims.height + 1) / 2;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
}
auto paddedCbCrSize = uvDims;
mLib->avcodec_align_dimensions(aCodecContext, &paddedCbCrSize.width,
&paddedCbCrSize.height);
data.mCbCrSize =
gfx::IntSize{paddedCbCrSize.Width(), paddedCbCrSize.Height()};
data.mCbCrStride = data.mCbCrSize.Width() * bytesPerChannel;
data.mCbCrStride = paddedCbCrSize.Width() * bytesPerChannel;
// Setting other attributes
data.mPicSize = gfx::IntSize{aCodecContext->width, aCodecContext->height};
const gfx::IntRect picture =
aInfo.ScaledImageRect(data.mPicSize.Width(), data.mPicSize.Height());
data.mPicX = picture.x;
data.mPicY = picture.y;
data.mStereoMode = aInfo.mStereoMode;
data.mPictureRect = gfx::IntRect(
mInfo.ScaledImageRect(aCodecContext->width, aCodecContext->height)
.TopLeft(),
gfx::IntSize(aCodecContext->width, aCodecContext->height));
data.mStereoMode = mInfo.mStereoMode;
if (aCodecContext->colorspace != AVCOL_SPC_UNSPECIFIED) {
data.mYUVColorSpace =
TransferAVColorSpaceToYUVColorSpace(aCodecContext->colorspace);
} else {
data.mYUVColorSpace = aInfo.mColorSpace ? *aInfo.mColorSpace
: DefaultColorSpace(data.mPicSize);
data.mYUVColorSpace = mInfo.mColorSpace
? *mInfo.mColorSpace
: DefaultColorSpace(data.mPictureRect.Size());
}
data.mColorDepth = GetColorDepth(aCodecContext->pix_fmt);
data.mColorRange = aCodecContext->color_range == AVCOL_RANGE_JPEG
@ -588,10 +573,16 @@ FFmpegVideoDecoder<LIBAV_VER>::CreateEmptyPlanarYCbCrData(
FFMPEG_LOGV(
"Created plane data, YSize=(%d, %d), CbCrSize=(%d, %d), "
"CroppedYSize=(%d, %d), CroppedCbCrSize=(%d, %d), ColorDepth=%hhu",
data.mYSize.Width(), data.mYSize.Height(), data.mCbCrSize.Width(),
data.mCbCrSize.Height(), data.mPicSize.Width(), data.mPicSize.Height(),
uvDims.Width(), uvDims.Height(), static_cast<uint8_t>(data.mColorDepth));
return data;
paddedYSize.Width(), paddedYSize.Height(), paddedCbCrSize.Width(),
paddedCbCrSize.Height(), data.YPictureSize().Width(),
data.YPictureSize().Height(), data.CbCrPictureSize().Width(),
data.CbCrPictureSize().Height(), static_cast<uint8_t>(data.mColorDepth));
// Allocate a shmem buffer for image.
if (!aImage->CreateEmptyBuffer(data, paddedYSize, paddedCbCrSize)) {
return nullptr;
}
return aImage->GetTextureClient(mImageAllocator);
}
int FFmpegVideoDecoder<LIBAV_VER>::GetVideoBuffer(
@ -655,7 +646,7 @@ int FFmpegVideoDecoder<LIBAV_VER>::GetVideoBuffer(
}
RefPtr<layers::TextureClient> texture =
AllocateTextueClientForImage(aCodecContext, image);
AllocateTextureClientForImage(aCodecContext, image);
if (!texture) {
FFMPEG_LOG("Failed to allocate a texture client");
return AVERROR(EINVAL);
@ -999,6 +990,7 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImage(
|| mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P12LE
#endif
) {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
b.mPlanes[1].mWidth = b.mPlanes[2].mWidth = (mFrame->width + 1) >> 1;
b.mPlanes[1].mHeight = b.mPlanes[2].mHeight = mFrame->height;
if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P10LE) {
@ -1010,6 +1002,7 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImage(
}
#endif
} else {
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
b.mPlanes[1].mWidth = b.mPlanes[2].mWidth = (mFrame->width + 1) >> 1;
b.mPlanes[1].mHeight = b.mPlanes[2].mHeight = (mFrame->height + 1) >> 1;
if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV420P10LE) {

Просмотреть файл

@ -107,12 +107,9 @@ class FFmpegVideoDecoder<LIBAV_VER>
}
#if LIBAVCODEC_VERSION_MAJOR >= 57 && LIBAVUTIL_VERSION_MAJOR >= 56
layers::TextureClient* AllocateTextueClientForImage(
layers::TextureClient* AllocateTextureClientForImage(
struct AVCodecContext* aCodecContext, layers::PlanarYCbCrImage* aImage);
layers::PlanarYCbCrData CreateEmptyPlanarYCbCrData(
struct AVCodecContext* aCodecContext, const VideoInfo& aInfo);
gfx::IntSize GetAlignmentVideoFrameSize(struct AVCodecContext* aCodecContext,
int32_t aWidth,
int32_t aHeight) const;
@ -177,7 +174,7 @@ class FFmpegVideoDecoder<LIBAV_VER>
// its internal decoding queue.
//
// When an image is removed from mAllocatedImages it's recycled
// for a new frame by AllocateTextueClientForImage() in
// for a new frame by AllocateTextureClientForImage() in
// FFmpegVideoDecoder::GetVideoBuffer().
nsTHashSet<RefPtr<ImageBufferWrapper>> mAllocatedImages;
#endif

Просмотреть файл

@ -937,6 +937,8 @@ already_AddRefed<VideoData> MediaDataHelper::CreateYUV420VideoData(
b.mPlanes[2].mStride = (stride + 1) / 2;
b.mPlanes[2].mSkip = 0;
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
VideoInfo info(*mTrackInfo->GetAsVideoInfo());
auto maybeColorSpace = info.mColorSpace;

Просмотреть файл

@ -278,8 +278,10 @@ already_AddRefed<IMFSample> WMFMediaDataEncoder<T>::ConvertToNV12InputSample(
const PlanarYCbCrImage* image = aData->mImage->AsPlanarYCbCrImage();
MOZ_ASSERT(image);
const PlanarYCbCrData* yuv = image->GetData();
size_t yLength = yuv->mYStride * yuv->mYSize.height;
size_t length = yLength + (yuv->mCbCrStride * yuv->mCbCrSize.height * 2);
auto ySize = yuv->YDataSize();
auto cbcrSize = yuv->CbCrDataSize();
size_t yLength = yuv->mYStride * ySize.height;
size_t length = yLength + (yuv->mCbCrStride * cbcrSize.height * 2);
RefPtr<IMFSample> input;
HRESULT hr = mEncoder->CreateInputSample(&input, length);
@ -295,12 +297,11 @@ already_AddRefed<IMFSample> WMFMediaDataEncoder<T>::ConvertToNV12InputSample(
LockBuffer lockBuffer(buffer);
NS_ENSURE_TRUE(SUCCEEDED(lockBuffer.Result()), nullptr);
bool ok =
libyuv::I420ToNV12(yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
lockBuffer.Data(), yuv->mYStride,
lockBuffer.Data() + yLength, yuv->mCbCrStride * 2,
yuv->mYSize.width, yuv->mYSize.height) == 0;
bool ok = libyuv::I420ToNV12(
yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
lockBuffer.Data(), yuv->mYStride, lockBuffer.Data() + yLength,
yuv->mCbCrStride * 2, ySize.width, ySize.height) == 0;
NS_ENSURE_TRUE(ok, nullptr);
hr = input->SetSampleTime(UsecsToHNs(aData->mTime.ToMicroseconds()));

Просмотреть файл

@ -682,6 +682,8 @@ WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
b.mPlanes[2].mSkip = 1;
}
b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
// YuvColorSpace
b.mYUVColorSpace =
mColorSpace.refOr(DefaultColorSpace({videoWidth, videoHeight}));

Просмотреть файл

@ -259,17 +259,14 @@ static void AllocateSolidColorFrame(layers::PlanarYCbCrData& aData, int aWidth,
memset(frame + yLen + cbLen, aCr, crLen);
aData.mYChannel = frame;
aData.mYSize = IntSize(aWidth, aHeight);
aData.mYStride = aWidth;
aData.mCbCrStride = aWidth >> 1;
aData.mCbChannel = frame + yLen;
aData.mCrChannel = aData.mCbChannel + cbLen;
aData.mCbCrSize = IntSize(aWidth >> 1, aHeight >> 1);
aData.mPicX = 0;
aData.mPicY = 0;
aData.mPicSize = IntSize(aWidth, aHeight);
aData.mPictureRect = IntRect(0, 0, aWidth, aHeight);
aData.mStereoMode = StereoMode::MONO;
aData.mYUVColorSpace = gfx::YUVColorSpace::BT601;
aData.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
}
static void ReleaseFrame(layers::PlanarYCbCrData& aData) {

Просмотреть файл

@ -496,18 +496,14 @@ int MediaEngineRemoteVideoSource::DeliverFrame(
layers::PlanarYCbCrData data;
data.mYChannel = const_cast<uint8_t*>(buffer->DataY());
data.mYSize = gfx::IntSize(buffer->width(), buffer->height());
data.mYStride = buffer->StrideY();
MOZ_ASSERT(buffer->StrideU() == buffer->StrideV());
data.mCbCrStride = buffer->StrideU();
data.mCbChannel = const_cast<uint8_t*>(buffer->DataU());
data.mCrChannel = const_cast<uint8_t*>(buffer->DataV());
data.mCbCrSize =
gfx::IntSize((buffer->width() + 1) / 2, (buffer->height() + 1) / 2);
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = gfx::IntSize(buffer->width(), buffer->height());
data.mPictureRect = gfx::IntRect(0, 0, buffer->width(), buffer->height());
data.mYUVColorSpace = gfx::YUVColorSpace::BT601;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
RefPtr<layers::PlanarYCbCrImage> image =
mImageContainer->CreatePlanarYCbCrImage();

Просмотреть файл

@ -29,11 +29,11 @@ class ImageBuffer : public webrtc::VideoFrameBuffer {
}
const layers::PlanarYCbCrData* data = image->GetData();
rtc::scoped_refptr<webrtc::I420BufferInterface> buf =
webrtc::WrapI420Buffer(data->mPicSize.width, data->mPicSize.height,
data->mYChannel, data->mYStride,
data->mCbChannel, data->mCbCrStride,
data->mCrChannel, data->mCbCrStride,
rtc::KeepRefUntilDone(image.get()));
webrtc::WrapI420Buffer(
data->mPictureRect.width, data->mPictureRect.height,
data->mYChannel, data->mYStride, data->mCbChannel,
data->mCbCrStride, data->mCrChannel, data->mCbCrStride,
rtc::KeepRefUntilDone(image.get()));
return buf;
}

Просмотреть файл

@ -289,14 +289,13 @@ static already_AddRefed<VideoData> CreateVideoDataFromWebrtcVideoFrame(
PlanarYCbCrData yCbCrData;
yCbCrData.mYChannel = const_cast<uint8_t*>(i420->DataY());
yCbCrData.mYSize = gfx::IntSize(i420->width(), i420->height());
yCbCrData.mYStride = i420->StrideY();
yCbCrData.mCbChannel = const_cast<uint8_t*>(i420->DataU());
yCbCrData.mCrChannel = const_cast<uint8_t*>(i420->DataV());
yCbCrData.mCbCrSize = gfx::IntSize(i420->ChromaWidth(), i420->ChromaHeight());
MOZ_ASSERT(i420->StrideU() == i420->StrideV());
yCbCrData.mCbCrStride = i420->StrideU();
yCbCrData.mPicSize = gfx::IntSize(i420->width(), i420->height());
yCbCrData.mPictureRect = gfx::IntRect(0, 0, i420->width(), i420->height());
yCbCrData.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
RefPtr<PlanarYCbCrImage> image =
new RecyclingPlanarYCbCrImage(new BufferRecycleBin());

Просмотреть файл

@ -1518,20 +1518,17 @@ class MediaPipelineReceiveVideo::PipelineListener
PlanarYCbCrData yuvData;
yuvData.mYChannel = const_cast<uint8_t*>(i420->DataY());
yuvData.mYSize = IntSize(i420->width(), i420->height());
yuvData.mYStride = i420->StrideY();
MOZ_ASSERT(i420->StrideU() == i420->StrideV());
yuvData.mCbCrStride = i420->StrideU();
yuvData.mCbChannel = const_cast<uint8_t*>(i420->DataU());
yuvData.mCrChannel = const_cast<uint8_t*>(i420->DataV());
yuvData.mCbCrSize =
IntSize((i420->width() + 1) >> 1, (i420->height() + 1) >> 1);
yuvData.mPicX = 0;
yuvData.mPicY = 0;
yuvData.mPicSize = IntSize(i420->width(), i420->height());
yuvData.mPictureRect = IntRect(0, 0, i420->width(), i420->height());
yuvData.mStereoMode = StereoMode::MONO;
// This isn't the best default.
yuvData.mYUVColorSpace = gfx::YUVColorSpace::BT601;
yuvData.mChromaSubsampling =
gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
if (!yuvImage->CopyData(yuvData)) {
MOZ_ASSERT(false);

Просмотреть файл

@ -507,6 +507,27 @@ static inline uint32_t RescalingFactorForColorDepth(ColorDepth aColorDepth) {
return factor;
}
// Chroma subsampling scheme for planar YCbCr data: describes how the Cb/Cr
// planes are sized relative to the Y plane (see ChromaSize below, which maps
// each scheme to concrete chroma-plane dimensions).
enum class ChromaSubsampling : uint8_t {
  FULL,                   // Chroma planes match the Y plane size (4:4:4).
  HALF_WIDTH,             // Chroma halved horizontally only (4:2:2).
  HALF_WIDTH_AND_HEIGHT,  // Chroma halved in both dimensions (4:2:0).
  _First = FULL,
  _Last = HALF_WIDTH_AND_HEIGHT,
};
// Computes the chroma (Cb/Cr) plane dimensions implied by a luma (Y) plane of
// size aYSize under the given subsampling scheme. T is any size-like type with
// width/height members and a (width, height) constructor — presumably
// gfx::IntSize here (TODO confirm at call sites). Halved dimensions round up
// so that odd-sized luma planes are fully covered by chroma samples.
template <typename T>
static inline T ChromaSize(const T& aYSize, ChromaSubsampling aSubsampling) {
  if (aSubsampling == ChromaSubsampling::FULL) {
    // 4:4:4 — no subsampling; chroma planes are the same size as luma.
    return aYSize;
  }
  const auto halfWidth = (aYSize.width + 1) / 2;
  if (aSubsampling == ChromaSubsampling::HALF_WIDTH) {
    // 4:2:2 — horizontal-only subsampling.
    return T(halfWidth, aYSize.height);
  }
  if (aSubsampling == ChromaSubsampling::HALF_WIDTH_AND_HEIGHT) {
    // 4:2:0 — subsampled in both dimensions.
    return T(halfWidth, (aYSize.height + 1) / 2);
  }
  // Unreachable for valid enum values; treat anything else as a fatal bug.
  MOZ_CRASH("bad ChromaSubsampling");
}
enum class FilterType : int8_t {
BLEND = 0,
TRANSFORM,

Просмотреть файл

@ -895,26 +895,34 @@ bool GLBlitHelper::BlitPlanarYCbCr(const PlanarYCbCrData& yuvData,
// --
if (yuvData.mYSkip || yuvData.mCbSkip || yuvData.mCrSkip ||
yuvData.mYSize.width < 0 || yuvData.mYSize.height < 0 ||
yuvData.mCbCrSize.width < 0 || yuvData.mCbCrSize.height < 0 ||
auto ySize = yuvData.YDataSize();
auto cbcrSize = yuvData.CbCrDataSize();
if (yuvData.mYSkip || yuvData.mCbSkip || yuvData.mCrSkip || ySize.width < 0 ||
ySize.height < 0 || cbcrSize.width < 0 || cbcrSize.height < 0 ||
yuvData.mYStride < 0 || yuvData.mCbCrStride < 0) {
gfxCriticalError() << "Unusual PlanarYCbCrData: " << yuvData.mYSkip << ","
<< yuvData.mCbSkip << "," << yuvData.mCrSkip << ", "
<< yuvData.mYSize.width << "," << yuvData.mYSize.height
<< ", " << yuvData.mCbCrSize.width << ","
<< yuvData.mCbCrSize.height << ", " << yuvData.mYStride
<< "," << yuvData.mCbCrStride;
<< ySize.width << "," << ySize.height << ", "
<< cbcrSize.width << "," << cbcrSize.height << ", "
<< yuvData.mYStride << "," << yuvData.mCbCrStride;
return false;
}
gfx::IntSize divisors;
if (!GuessDivisors(yuvData.mYSize, yuvData.mCbCrSize, &divisors)) {
gfxCriticalError() << "GuessDivisors failed:" << yuvData.mYSize.width << ","
<< yuvData.mYSize.height << ", "
<< yuvData.mCbCrSize.width << ","
<< yuvData.mCbCrSize.height;
return false;
switch (yuvData.mChromaSubsampling) {
case gfx::ChromaSubsampling::FULL:
divisors = gfx::IntSize(1, 1);
break;
case gfx::ChromaSubsampling::HALF_WIDTH:
divisors = gfx::IntSize(2, 1);
break;
case gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT:
divisors = gfx::IntSize(2, 2);
break;
default:
gfxCriticalError() << "Unknown chroma subsampling:"
<< int(yuvData.mChromaSubsampling);
return false;
}
// --
@ -937,8 +945,9 @@ bool GLBlitHelper::BlitPlanarYCbCr(const PlanarYCbCrData& yuvData,
const ScopedSaveMultiTex saveTex(mGL, 3, LOCAL_GL_TEXTURE_2D);
const ResetUnpackState reset(mGL);
const gfx::IntSize yTexSize(yuvData.mYStride, yuvData.mYSize.height);
const gfx::IntSize uvTexSize(yuvData.mCbCrStride, yuvData.mCbCrSize.height);
const gfx::IntSize yTexSize(yuvData.mYStride, yuvData.YDataSize().height);
const gfx::IntSize uvTexSize(yuvData.mCbCrStride,
yuvData.CbCrDataSize().height);
if (yTexSize != mYuvUploads_YSize || uvTexSize != mYuvUploads_UVSize) {
mYuvUploads_YSize = yTexSize;
@ -978,7 +987,7 @@ bool GLBlitHelper::BlitPlanarYCbCr(const PlanarYCbCrData& yuvData,
// --
const auto& clipRect = yuvData.GetPictureRect();
const auto& clipRect = yuvData.mPictureRect;
const auto srcOrigin = OriginPos::BottomLeft;
const bool yFlip = (destOrigin != srcOrigin);

Просмотреть файл

@ -210,9 +210,9 @@ bool GLBlitHelper::BlitImage(layers::D3D11YCbCrImage* const srcImage,
const WindowsHandle handles[3] = {(WindowsHandle)data->mHandles[0],
(WindowsHandle)data->mHandles[1],
(WindowsHandle)data->mHandles[2]};
return BlitAngleYCbCr(handles, srcImage->mPictureRect, srcImage->mYSize,
srcImage->mCbCrSize, srcImage->mColorSpace, destSize,
destOrigin);
return BlitAngleYCbCr(handles, srcImage->mPictureRect, srcImage->YDataSize(),
srcImage->CbCrDataSize(), srcImage->mColorSpace,
destSize, destOrigin);
}
// -------------------------------------

Просмотреть файл

@ -711,6 +711,13 @@ struct ParamTraits<mozilla::StereoMode>
mozilla::StereoMode::MONO,
mozilla::StereoMode::MAX> {};
// IPC serializer for gfx::ChromaSubsampling, covering the contiguous
// inclusive enumerator range [_First, _Last] declared on the enum.
template <>
struct ParamTraits<mozilla::gfx::ChromaSubsampling>
    : public ContiguousEnumSerializerInclusive<
          mozilla::gfx::ChromaSubsampling,
          mozilla::gfx::ChromaSubsampling::_First,
          mozilla::gfx::ChromaSubsampling::_Last> {};
template <>
struct ParamTraits<mozilla::gfx::ImplicitlyCopyableFloatArray>
: public ParamTraits<nsTArray<float>> {

Просмотреть файл

@ -143,7 +143,8 @@ BufferTextureData* BufferTextureData::CreateForYCbCr(
const gfx::IntSize& aYSize, uint32_t aYStride,
const gfx::IntSize& aCbCrSize, uint32_t aCbCrStride, StereoMode aStereoMode,
gfx::ColorDepth aColorDepth, gfx::YUVColorSpace aYUVColorSpace,
gfx::ColorRange aColorRange, TextureFlags aTextureFlags) {
gfx::ColorRange aColorRange, gfx::ChromaSubsampling aSubsampling,
TextureFlags aTextureFlags) {
uint32_t bufSize = ImageDataSerializer::ComputeYCbCrBufferSize(
aYSize, aYStride, aCbCrSize, aCbCrStride);
if (bufSize == 0) {
@ -157,9 +158,10 @@ BufferTextureData* BufferTextureData::CreateForYCbCr(
aCbCrSize.height, yOffset, cbOffset,
crOffset);
YCbCrDescriptor descriptor = YCbCrDescriptor(
aDisplay, aYSize, aYStride, aCbCrSize, aCbCrStride, yOffset, cbOffset,
crOffset, aStereoMode, aColorDepth, aYUVColorSpace, aColorRange);
YCbCrDescriptor descriptor =
YCbCrDescriptor(aDisplay, aYSize, aYStride, aCbCrSize, aCbCrStride,
yOffset, cbOffset, crOffset, aStereoMode, aColorDepth,
aYUVColorSpace, aColorRange, aSubsampling);
return CreateInternal(
aAllocator ? aAllocator->GetTextureForwarder() : nullptr, descriptor,
@ -218,6 +220,11 @@ Maybe<StereoMode> BufferTextureData::GetStereoMode() const {
return ImageDataSerializer::StereoModeFromBufferDescriptor(mDescriptor);
}
Maybe<gfx::ChromaSubsampling> BufferTextureData::GetChromaSubsampling() const {
return ImageDataSerializer::ChromaSubsamplingFromBufferDescriptor(
mDescriptor);
}
gfx::SurfaceFormat BufferTextureData::GetFormat() const {
return ImageDataSerializer::FormatFromBufferDescriptor(mDescriptor);
}

Просмотреть файл

@ -35,7 +35,7 @@ class BufferTextureData : public TextureData {
const gfx::IntSize& aCbCrSize, uint32_t aCbCrStride,
StereoMode aStereoMode, gfx::ColorDepth aColorDepth,
gfx::YUVColorSpace aYUVColorSpace, gfx::ColorRange aColorRange,
TextureFlags aTextureFlags);
gfx::ChromaSubsampling aSubsampling, TextureFlags aTextureFlags);
bool Lock(OpenMode aMode) override { return true; }
@ -68,6 +68,8 @@ class BufferTextureData : public TextureData {
Maybe<StereoMode> GetStereoMode() const;
Maybe<gfx::ChromaSubsampling> GetChromaSubsampling() const;
gfx::IntRect GetPictureRect() const;
protected:

Просмотреть файл

@ -27,13 +27,11 @@ D3D11YCbCrImage::~D3D11YCbCrImage() {}
bool D3D11YCbCrImage::SetData(KnowsCompositor* aAllocator,
ImageContainer* aContainer,
const PlanarYCbCrData& aData) {
mPictureRect = IntRect(aData.mPicX, aData.mPicY, aData.mPicSize.width,
aData.mPicSize.height);
mYSize = aData.mYSize;
mCbCrSize = aData.mCbCrSize;
mPictureRect = aData.mPictureRect;
mColorDepth = aData.mColorDepth;
mColorSpace = aData.mYUVColorSpace;
mColorRange = aData.mColorRange;
mChromaSubsampling = aData.mChromaSubsampling;
D3D11YCbCrRecycleAllocator* allocator =
aContainer->GetD3D11YCbCrRecycleAllocator(aAllocator);
@ -90,13 +88,13 @@ bool D3D11YCbCrImage::SetData(KnowsCompositor* aAllocator,
AutoLockD3D11Texture lockCr(textureCr);
ctx->UpdateSubresource(textureY, 0, nullptr, aData.mYChannel, aData.mYStride,
aData.mYStride * aData.mYSize.height);
aData.mYStride * aData.YDataSize().height);
ctx->UpdateSubresource(textureCb, 0, nullptr, aData.mCbChannel,
aData.mCbCrStride,
aData.mCbCrStride * aData.mCbCrSize.height);
aData.mCbCrStride * aData.CbCrDataSize().height);
ctx->UpdateSubresource(textureCr, 0, nullptr, aData.mCrChannel,
aData.mCbCrStride,
aData.mCbCrStride * aData.mCbCrSize.height);
aData.mCbCrStride * aData.CbCrDataSize().height);
return true;
}
@ -239,16 +237,13 @@ already_AddRefed<SourceSurface> D3D11YCbCrImage::GetAsSourceSurface() {
MOZ_ASSERT(mapCb.RowPitch == mapCr.RowPitch);
data.mPicX = mPictureRect.X();
data.mPicY = mPictureRect.Y();
data.mPicSize = mPictureRect.Size();
data.mPictureRect = mPictureRect;
data.mStereoMode = StereoMode::MONO;
data.mColorDepth = mColorDepth;
data.mYUVColorSpace = mColorSpace;
data.mColorRange = mColorRange;
data.mChromaSubsampling = mChromaSubsampling;
data.mYSkip = data.mCbSkip = data.mCrSkip = 0;
data.mYSize = mYSize;
data.mCbCrSize = mCbCrSize;
data.mYChannel = static_cast<uint8_t*>(mapY.pData);
data.mYStride = mapY.RowPitch;
data.mCbChannel = static_cast<uint8_t*>(mapCb.pData);
@ -332,9 +327,9 @@ class AutoCheckLockD3D11Texture final {
DXGIYCbCrTextureAllocationHelper::DXGIYCbCrTextureAllocationHelper(
const PlanarYCbCrData& aData, TextureFlags aTextureFlags,
ID3D11Device* aDevice)
: ITextureClientAllocationHelper(gfx::SurfaceFormat::YUV, aData.mPicSize,
BackendSelector::Content, aTextureFlags,
ALLOC_DEFAULT),
: ITextureClientAllocationHelper(
gfx::SurfaceFormat::YUV, aData.mPictureRect.Size(),
BackendSelector::Content, aTextureFlags, ALLOC_DEFAULT),
mData(aData),
mDevice(aDevice) {}
@ -344,9 +339,9 @@ bool DXGIYCbCrTextureAllocationHelper::IsCompatible(
DXGIYCbCrTextureData* dxgiData =
aTextureClient->GetInternalData()->AsDXGIYCbCrTextureData();
if (!dxgiData || aTextureClient->GetSize() != mData.mPicSize ||
dxgiData->GetYSize() != mData.mYSize ||
dxgiData->GetCbCrSize() != mData.mCbCrSize ||
if (!dxgiData || aTextureClient->GetSize() != mData.mPictureRect.Size() ||
dxgiData->GetYSize() != mData.YDataSize() ||
dxgiData->GetCbCrSize() != mData.CbCrDataSize() ||
dxgiData->GetColorDepth() != mData.mColorDepth ||
dxgiData->GetYUVColorSpace() != mData.mYUVColorSpace) {
return false;
@ -378,10 +373,12 @@ bool DXGIYCbCrTextureAllocationHelper::IsCompatible(
already_AddRefed<TextureClient> DXGIYCbCrTextureAllocationHelper::Allocate(
KnowsCompositor* aAllocator) {
auto ySize = mData.YDataSize();
auto cbcrSize = mData.CbCrDataSize();
CD3D11_TEXTURE2D_DESC newDesc(mData.mColorDepth == gfx::ColorDepth::COLOR_8
? DXGI_FORMAT_R8_UNORM
: DXGI_FORMAT_R16_UNORM,
mData.mYSize.width, mData.mYSize.height, 1, 1);
ySize.width, ySize.height, 1, 1);
// WebRender requests keyed mutex
if (mDevice == gfx::DeviceManagerDx::Get()->GetCompositorDevice() &&
!gfxVars::UseWebRender()) {
@ -409,8 +406,8 @@ already_AddRefed<TextureClient> DXGIYCbCrTextureAllocationHelper::Allocate(
hr = mDevice->CreateTexture2D(&newDesc, nullptr, getter_AddRefs(textureY));
NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
newDesc.Width = mData.mCbCrSize.width;
newDesc.Height = mData.mCbCrSize.height;
newDesc.Width = cbcrSize.width;
newDesc.Height = cbcrSize.height;
RefPtr<ID3D11Texture2D> textureCb;
hr = mDevice->CreateTexture2D(&newDesc, nullptr, getter_AddRefs(textureCb));
@ -424,10 +421,9 @@ already_AddRefed<TextureClient> DXGIYCbCrTextureAllocationHelper::Allocate(
aAllocator ? aAllocator->GetTextureForwarder() : nullptr;
return TextureClient::CreateWithData(
DXGIYCbCrTextureData::Create(textureY, textureCb, textureCr,
mData.mPicSize, mData.mYSize,
mData.mCbCrSize, mData.mColorDepth,
mData.mYUVColorSpace, mData.mColorRange),
DXGIYCbCrTextureData::Create(
textureY, textureCb, textureCr, mData.mPictureRect.Size(), ySize,
cbcrSize, mData.mColorDepth, mData.mYUVColorSpace, mData.mColorRange),
mTextureFlags, forwarder);
}

Просмотреть файл

@ -73,12 +73,11 @@ class D3D11YCbCrImage : public Image {
private:
const DXGIYCbCrTextureData* GetData() const;
gfx::IntSize mYSize;
gfx::IntSize mCbCrSize;
gfx::IntRect mPictureRect;
gfx::ColorDepth mColorDepth;
gfx::YUVColorSpace mColorSpace;
gfx::ColorRange mColorRange;
gfx::ChromaSubsampling mChromaSubsampling;
RefPtr<TextureClient> mTextureClient;
};

Просмотреть файл

@ -85,13 +85,13 @@ bool IMFYCbCrImage::CopyDataToTexture(const Data& aData, ID3D11Device* aDevice,
D3D11_BOX box;
box.front = box.top = box.left = 0;
box.back = 1;
box.right = aData.mYSize.width;
box.bottom = aData.mYSize.height;
box.right = aData.YDataSize().width;
box.bottom = aData.YDataSize().height;
ctx->UpdateSubresource(textureY, 0, &box, aData.mYChannel, aData.mYStride,
0);
box.right = aData.mCbCrSize.width;
box.bottom = aData.mCbCrSize.height;
box.right = aData.CbCrDataSize().width;
box.bottom = aData.CbCrDataSize().height;
ctx->UpdateSubresource(textureCb, 0, &box, aData.mCbChannel,
aData.mCbCrStride, 0);
ctx->UpdateSubresource(textureCr, 0, &box, aData.mCrChannel,

Просмотреть файл

@ -511,20 +511,16 @@ Maybe<PlanarYCbCrData> PlanarYCbCrData::From(
}
PlanarYCbCrData yuvData;
yuvData.mYSize = yuvDesc.ySize();
yuvData.mYStride = AssertedCast<int32_t>(yuvDesc.yStride());
yuvData.mCbCrSize = yuvDesc.cbCrSize();
yuvData.mCbCrStride = AssertedCast<int32_t>(yuvDesc.cbCrStride());
// default mYSkip, mCbSkip, mCrSkip because not held in YCbCrDescriptor
yuvData.mYSkip = yuvData.mCbSkip = yuvData.mCrSkip = 0;
gfx::IntRect display = yuvDesc.display();
yuvData.mPicX = display.X();
yuvData.mPicY = display.Y();
yuvData.mPicSize = display.Size();
yuvData.mPictureRect = yuvDesc.display();
yuvData.mStereoMode = yuvDesc.stereoMode();
yuvData.mColorDepth = yuvDesc.colorDepth();
yuvData.mYUVColorSpace = yuvDesc.yUVColorSpace();
yuvData.mColorRange = yuvDesc.colorRange();
yuvData.mChromaSubsampling = yuvDesc.chromaSubsampling();
const auto GetPlanePtr = [&](const uint32_t beginOffset,
const gfx::IntSize size,
@ -541,22 +537,22 @@ Maybe<PlanarYCbCrData> PlanarYCbCrData::From(
return (buffer->begin() + beginOffset).get();
};
yuvData.mYChannel =
GetPlanePtr(yuvDesc.yOffset(), yuvData.mYSize, yuvData.mYStride);
GetPlanePtr(yuvDesc.yOffset(), yuvDesc.ySize(), yuvData.mYStride);
yuvData.mCbChannel =
GetPlanePtr(yuvDesc.cbOffset(), yuvData.mCbCrSize, yuvData.mCbCrStride);
GetPlanePtr(yuvDesc.cbOffset(), yuvDesc.cbCrSize(), yuvData.mCbCrStride);
yuvData.mCrChannel =
GetPlanePtr(yuvDesc.crOffset(), yuvData.mCbCrSize, yuvData.mCbCrStride);
GetPlanePtr(yuvDesc.crOffset(), yuvDesc.cbCrSize(), yuvData.mCbCrStride);
if (yuvData.mYSkip || yuvData.mCbSkip || yuvData.mCrSkip ||
yuvData.mYSize.width < 0 || yuvData.mYSize.height < 0 ||
yuvData.mCbCrSize.width < 0 || yuvData.mCbCrSize.height < 0 ||
yuvDesc.ySize().width < 0 || yuvDesc.ySize().height < 0 ||
yuvDesc.cbCrSize().width < 0 || yuvDesc.cbCrSize().height < 0 ||
yuvData.mYStride < 0 || yuvData.mCbCrStride < 0 || !yuvData.mYChannel ||
!yuvData.mCbChannel || !yuvData.mCrChannel) {
gfxCriticalError() << "Unusual PlanarYCbCrData: " << yuvData.mYSkip << ","
<< yuvData.mCbSkip << "," << yuvData.mCrSkip << ", "
<< yuvData.mYSize.width << "," << yuvData.mYSize.height
<< ", " << yuvData.mCbCrSize.width << ","
<< yuvData.mCbCrSize.height << ", " << yuvData.mYStride
<< yuvDesc.ySize().width << "," << yuvDesc.ySize().height
<< ", " << yuvDesc.cbCrSize().width << ","
<< yuvDesc.cbCrSize().height << ", " << yuvData.mYStride
<< "," << yuvData.mCbCrStride << ", "
<< yuvData.mYChannel << "," << yuvData.mCbChannel << ","
<< yuvData.mCrChannel;
@ -581,16 +577,18 @@ nsresult PlanarYCbCrImage::BuildSurfaceDescriptorBuffer(
MOZ_ASSERT(pdata->mYSkip == 0 && pdata->mCbSkip == 0 && pdata->mCrSkip == 0,
"YCbCrDescriptor doesn't hold skip values");
auto ySize = pdata->YDataSize();
auto cbcrSize = pdata->CbCrDataSize();
uint32_t yOffset;
uint32_t cbOffset;
uint32_t crOffset;
ImageDataSerializer::ComputeYCbCrOffsets(
pdata->mYStride, pdata->mYSize.height, pdata->mCbCrStride,
pdata->mCbCrSize.height, yOffset, cbOffset, crOffset);
ImageDataSerializer::ComputeYCbCrOffsets(pdata->mYStride, ySize.height,
pdata->mCbCrStride, cbcrSize.height,
yOffset, cbOffset, crOffset);
uint32_t bufferSize = ImageDataSerializer::ComputeYCbCrBufferSize(
pdata->mYSize, pdata->mYStride, pdata->mCbCrSize, pdata->mCbCrStride,
yOffset, cbOffset, crOffset);
ySize, pdata->mYStride, cbcrSize, pdata->mCbCrStride, yOffset, cbOffset,
crOffset);
aSdBuffer.data() = aAllocate(bufferSize);
@ -612,16 +610,16 @@ nsresult PlanarYCbCrImage::BuildSurfaceDescriptorBuffer(
}
aSdBuffer.desc() = YCbCrDescriptor(
pdata->GetPictureRect(), pdata->mYSize, pdata->mYStride, pdata->mCbCrSize,
pdata->mCbCrStride, yOffset, cbOffset, crOffset, pdata->mStereoMode,
pdata->mColorDepth, pdata->mYUVColorSpace, pdata->mColorRange);
pdata->mPictureRect, ySize, pdata->mYStride, cbcrSize, pdata->mCbCrStride,
yOffset, cbOffset, crOffset, pdata->mStereoMode, pdata->mColorDepth,
pdata->mYUVColorSpace, pdata->mColorRange, pdata->mChromaSubsampling);
CopyPlane(buffer + yOffset, pdata->mYChannel, pdata->mYSize, pdata->mYStride,
CopyPlane(buffer + yOffset, pdata->mYChannel, ySize, pdata->mYStride,
pdata->mYSkip);
CopyPlane(buffer + cbOffset, pdata->mCbChannel, pdata->mCbCrSize,
pdata->mCbCrStride, pdata->mCbSkip);
CopyPlane(buffer + crOffset, pdata->mCrChannel, pdata->mCbCrSize,
pdata->mCbCrStride, pdata->mCrSkip);
CopyPlane(buffer + cbOffset, pdata->mCbChannel, cbcrSize, pdata->mCbCrStride,
pdata->mCbSkip);
CopyPlane(buffer + crOffset, pdata->mCrChannel, cbcrSize, pdata->mCbCrStride,
pdata->mCrSkip);
return NS_OK;
}
@ -682,9 +680,11 @@ static void CopyPlane(uint8_t* aDst, const uint8_t* aSrc,
bool RecyclingPlanarYCbCrImage::CopyData(const Data& aData) {
// update buffer size
// Use uint32_t throughout to match AllocateBuffer's param and mBufferSize
auto ySize = aData.YDataSize();
auto cbcrSize = aData.CbCrDataSize();
const auto checkedSize =
CheckedInt<uint32_t>(aData.mCbCrStride) * aData.mCbCrSize.height * 2 +
CheckedInt<uint32_t>(aData.mYStride) * aData.mYSize.height;
CheckedInt<uint32_t>(aData.mCbCrStride) * cbcrSize.height * 2 +
CheckedInt<uint32_t>(aData.mYStride) * ySize.height;
if (!checkedSize.isValid()) return false;
@ -699,20 +699,19 @@ bool RecyclingPlanarYCbCrImage::CopyData(const Data& aData) {
mData = aData;
mData.mYChannel = mBuffer.get();
mData.mCbChannel = mData.mYChannel + mData.mYStride * mData.mYSize.height;
mData.mCrChannel =
mData.mCbChannel + mData.mCbCrStride * mData.mCbCrSize.height;
mData.mCbChannel = mData.mYChannel + mData.mYStride * ySize.height;
mData.mCrChannel = mData.mCbChannel + mData.mCbCrStride * cbcrSize.height;
mData.mYSkip = mData.mCbSkip = mData.mCrSkip = 0;
CopyPlane(mData.mYChannel, aData.mYChannel, aData.mYSize, aData.mYStride,
CopyPlane(mData.mYChannel, aData.mYChannel, ySize, aData.mYStride,
aData.mYSkip);
CopyPlane(mData.mCbChannel, aData.mCbChannel, aData.mCbCrSize,
aData.mCbCrStride, aData.mCbSkip);
CopyPlane(mData.mCrChannel, aData.mCrChannel, aData.mCbCrSize,
aData.mCbCrStride, aData.mCrSkip);
CopyPlane(mData.mCbChannel, aData.mCbChannel, cbcrSize, aData.mCbCrStride,
aData.mCbSkip);
CopyPlane(mData.mCrChannel, aData.mCrChannel, cbcrSize, aData.mCbCrStride,
aData.mCrSkip);
mSize = aData.mPicSize;
mOrigin = gfx::IntPoint(aData.mPicX, aData.mPicY);
mSize = aData.mPictureRect.Size();
mOrigin = aData.mPictureRect.TopLeft();
return true;
}
@ -723,8 +722,8 @@ gfxImageFormat PlanarYCbCrImage::GetOffscreenFormat() const {
bool PlanarYCbCrImage::AdoptData(const Data& aData) {
mData = aData;
mSize = aData.mPicSize;
mOrigin = gfx::IntPoint(aData.mPicX, aData.mPicY);
mSize = aData.mPictureRect.Size();
mOrigin = aData.mPictureRect.TopLeft();
return true;
}
@ -776,7 +775,7 @@ NVImage::~NVImage() {
IntSize NVImage::GetSize() const { return mSize; }
IntRect NVImage::GetPictureRect() const { return mData.GetPictureRect(); }
IntRect NVImage::GetPictureRect() const { return mData.mPictureRect; }
already_AddRefed<SourceSurface> NVImage::GetAsSourceSurface() {
if (mSourceSurface) {
@ -786,31 +785,30 @@ already_AddRefed<SourceSurface> NVImage::GetAsSourceSurface() {
// Convert the current NV12 or NV21 data to YUV420P so that we can follow the
// logics in PlanarYCbCrImage::GetAsSourceSurface().
const int bufferLength = mData.mYSize.height * mData.mYStride +
mData.mCbCrSize.height * mData.mCbCrSize.width * 2;
auto ySize = mData.YDataSize();
auto cbcrSize = mData.CbCrDataSize();
const int bufferLength =
ySize.height * mData.mYStride + cbcrSize.height * cbcrSize.width * 2;
UniquePtr<uint8_t[]> buffer(new uint8_t[bufferLength]);
Data aData = mData;
aData.mCbCrStride = aData.mCbCrSize.width;
aData.mCbCrStride = cbcrSize.width;
aData.mCbSkip = 0;
aData.mCrSkip = 0;
aData.mYChannel = buffer.get();
aData.mCbChannel = aData.mYChannel + aData.mYSize.height * aData.mYStride;
aData.mCrChannel =
aData.mCbChannel + aData.mCbCrSize.height * aData.mCbCrStride;
aData.mCbChannel = aData.mYChannel + ySize.height * aData.mYStride;
aData.mCrChannel = aData.mCbChannel + cbcrSize.height * aData.mCbCrStride;
if (mData.mCbChannel < mData.mCrChannel) { // NV12
libyuv::NV12ToI420(mData.mYChannel, mData.mYStride, mData.mCbChannel,
mData.mCbCrStride, aData.mYChannel, aData.mYStride,
aData.mCbChannel, aData.mCbCrStride, aData.mCrChannel,
aData.mCbCrStride, aData.mYSize.width,
aData.mYSize.height);
aData.mCbCrStride, ySize.width, ySize.height);
} else { // NV21
libyuv::NV21ToI420(mData.mYChannel, mData.mYStride, mData.mCrChannel,
mData.mCbCrStride, aData.mYChannel, aData.mYStride,
aData.mCbChannel, aData.mCbCrStride, aData.mCrChannel,
aData.mCbCrStride, aData.mYSize.width,
aData.mYSize.height);
aData.mCbCrStride, ySize.width, ySize.height);
}
// The logics in PlanarYCbCrImage::GetAsSourceSurface().
@ -856,8 +854,8 @@ bool NVImage::SetData(const Data& aData) {
// Calculate buffer size
// Use uint32_t throughout to match AllocateBuffer's param and mBufferSize
const auto checkedSize =
CheckedInt<uint32_t>(aData.mYSize.height) * aData.mYStride +
CheckedInt<uint32_t>(aData.mCbCrSize.height) * aData.mCbCrStride;
CheckedInt<uint32_t>(aData.YDataSize().height) * aData.mYStride +
CheckedInt<uint32_t>(aData.CbCrDataSize().height) * aData.mCbCrStride;
if (!checkedSize.isValid()) return false;
@ -879,7 +877,7 @@ bool NVImage::SetData(const Data& aData) {
mData.mCrChannel = mData.mYChannel + (aData.mCrChannel - aData.mYChannel);
// Update mSize.
mSize = aData.mPicSize;
mSize = aData.mPictureRect.Size();
// Copy the input data into mBuffer.
// This copies the y-channel and the interleaving CbCr-channel.

Просмотреть файл

@ -638,26 +638,39 @@ struct PlanarYCbCrData {
// Luminance buffer
uint8_t* mYChannel = nullptr;
int32_t mYStride = 0;
gfx::IntSize mYSize = gfx::IntSize(0, 0);
int32_t mYSkip = 0;
// Chroma buffers
uint8_t* mCbChannel = nullptr;
uint8_t* mCrChannel = nullptr;
int32_t mCbCrStride = 0;
gfx::IntSize mCbCrSize = gfx::IntSize(0, 0);
int32_t mCbSkip = 0;
int32_t mCrSkip = 0;
// Picture region
uint32_t mPicX = 0;
uint32_t mPicY = 0;
gfx::IntSize mPicSize = gfx::IntSize(0, 0);
gfx::IntRect mPictureRect = gfx::IntRect(0, 0, 0, 0);
StereoMode mStereoMode = StereoMode::MONO;
gfx::ColorDepth mColorDepth = gfx::ColorDepth::COLOR_8;
gfx::YUVColorSpace mYUVColorSpace = gfx::YUVColorSpace::Default;
gfx::ColorRange mColorRange = gfx::ColorRange::LIMITED;
gfx::ChromaSubsampling mChromaSubsampling = gfx::ChromaSubsampling::FULL;
gfx::IntRect GetPictureRect() const {
return gfx::IntRect(mPicX, mPicY, mPicSize.width, mPicSize.height);
// The cropped picture size of the Y channel.
gfx::IntSize YPictureSize() const { return mPictureRect.Size(); }

// The cropped picture size of the Cb/Cr channels, derived from the Y
// picture size via the declared subsampling scheme. A non-positive
// mCbCrStride indicates there is no chroma data at all.
gfx::IntSize CbCrPictureSize() const {
  if (mCbCrStride > 0) {
    return gfx::ChromaSize(YPictureSize(), mChromaSubsampling);
  }
  return gfx::IntSize(0, 0);
}
// The total uncropped size of data in the Y channel: the picture rect's
// bottom-right corner, so any crop offset is included in the plane extent.
gfx::IntSize YDataSize() const {
  return gfx::IntSize(mPictureRect.XMost(), mPictureRect.YMost());
}

// The total uncropped size of data in the Cb/Cr channels, derived from the
// Y data size via the declared subsampling scheme. A non-positive
// mCbCrStride indicates there is no chroma data at all.
gfx::IntSize CbCrDataSize() const {
  if (mCbCrStride > 0) {
    return gfx::ChromaSize(YDataSize(), mChromaSubsampling);
  }
  return gfx::IntSize(0, 0);
}
static Maybe<PlanarYCbCrData> From(const SurfaceDescriptorBuffer&);
@ -684,12 +697,9 @@ struct PlanarAlphaData {
* 4:2:2 - CbCr width is half that of Y. Height is the same.
* 4:2:0 - CbCr width and height is half that of Y.
*
* The color format is detected based on the height/width ratios
* defined above.
* mChromaSubsampling specifies which YCbCr subsampling scheme to use.
*
* The Image that is rendered is the picture region defined by
* mPicX, mPicY and mPicSize. The size of the rendered image is
* mPicSize, not mYSize or mCbCrSize.
* The Image that is rendered is the picture region defined by mPictureRect.
*
* mYSkip, mCbSkip, mCrSkip are added to support various output
* formats from hardware decoder. They are per-pixel skips in the
@ -699,7 +709,7 @@ struct PlanarAlphaData {
* the mYChannel buffer looks like:
*
* |<----------------------- mYStride ----------------------------->|
* |<----------------- mYSize.width --------------->|
* |<----------------- YDataSize().width ---------->|
* 0 3 6 9 12 15 18 21 639 669
* |----------------------------------------------------------------|
* |Y___Y___Y___Y___Y___Y___Y___Y... |%%%%%%%%%%%%%%%|
@ -730,7 +740,13 @@ class PlanarYCbCrImage : public Image {
/**
* This will create an empty data buffers according to the input data's size.
*/
virtual bool CreateEmptyBuffer(const Data& aData) { return false; }
virtual bool CreateEmptyBuffer(const Data& aData, const gfx::IntSize& aYSize,
const gfx::IntSize& aCbCrSize) {
return false;
}
bool CreateEmptyBuffer(const Data& aData) {
return CreateEmptyBuffer(aData, aData.YDataSize(), aData.CbCrDataSize());
}
/**
* Ask this Image to not convert YUV to RGB during SetData, and make

Просмотреть файл

@ -256,6 +256,18 @@ Maybe<StereoMode> StereoModeFromBufferDescriptor(
}
}
Maybe<gfx::ChromaSubsampling> ChromaSubsamplingFromBufferDescriptor(
const BufferDescriptor& aDescriptor) {
switch (aDescriptor.type()) {
case BufferDescriptor::TRGBDescriptor:
return Nothing();
case BufferDescriptor::TYCbCrDescriptor:
return Some(aDescriptor.get_YCbCrDescriptor().chromaSubsampling());
default:
MOZ_CRASH("GFX: ChromaSubsamplingFromBufferDescriptor");
}
}
uint8_t* GetYChannel(uint8_t* aBuffer, const YCbCrDescriptor& aDescriptor) {
return aBuffer + aDescriptor.yOffset();
}
@ -299,16 +311,13 @@ already_AddRefed<DataSourceSurface> DataSourceSurfaceFromYCbCrDescriptor(
layers::PlanarYCbCrData ycbcrData;
ycbcrData.mYChannel = GetYChannel(aBuffer, aDescriptor);
ycbcrData.mYStride = aDescriptor.yStride();
ycbcrData.mYSize = aDescriptor.ySize();
ycbcrData.mCbChannel = GetCbChannel(aBuffer, aDescriptor);
ycbcrData.mCrChannel = GetCrChannel(aBuffer, aDescriptor);
ycbcrData.mCbCrStride = aDescriptor.cbCrStride();
ycbcrData.mCbCrSize = aDescriptor.cbCrSize();
ycbcrData.mPicSize = size;
ycbcrData.mPicX = display.X();
ycbcrData.mPicY = display.Y();
ycbcrData.mPictureRect = aDescriptor.display();
ycbcrData.mYUVColorSpace = aDescriptor.yUVColorSpace();
ycbcrData.mColorDepth = aDescriptor.colorDepth();
ycbcrData.mChromaSubsampling = aDescriptor.chromaSubsampling();
gfx::ConvertYCbCrToRGB(ycbcrData, gfx::SurfaceFormat::B8G8R8X8, size,
map.mData, map.mStride);
@ -325,29 +334,24 @@ void ConvertAndScaleFromYCbCrDescriptor(uint8_t* aBuffer,
int32_t aStride) {
MOZ_ASSERT(aBuffer);
const gfx::IntRect display = aDescriptor.display();
layers::PlanarYCbCrData ycbcrData;
ycbcrData.mYChannel = GetYChannel(aBuffer, aDescriptor);
ycbcrData.mYStride = aDescriptor.yStride();
ycbcrData.mYSize = aDescriptor.ySize();
ycbcrData.mCbChannel = GetCbChannel(aBuffer, aDescriptor);
ycbcrData.mCrChannel = GetCrChannel(aBuffer, aDescriptor);
ycbcrData.mCbCrStride = aDescriptor.cbCrStride();
ycbcrData.mCbCrSize = aDescriptor.cbCrSize();
ycbcrData.mPicSize = display.Size();
ycbcrData.mPicX = display.X();
ycbcrData.mPicY = display.Y();
ycbcrData.mPictureRect = aDescriptor.display();
ycbcrData.mYUVColorSpace = aDescriptor.yUVColorSpace();
ycbcrData.mColorDepth = aDescriptor.colorDepth();
ycbcrData.mChromaSubsampling = aDescriptor.chromaSubsampling();
gfx::ConvertYCbCrToRGB(ycbcrData, aDestFormat, aDestSize, aDestBuffer,
aStride);
}
gfx::IntSize GetCroppedCbCrSize(const YCbCrDescriptor& aDescriptor) {
return gfx::GetCroppedCbCrSize(aDescriptor.ySize(), aDescriptor.cbCrSize(),
aDescriptor.display().Size());
return ChromaSize(aDescriptor.display().Size(),
aDescriptor.chromaSubsampling());
}
} // namespace ImageDataSerializer

Просмотреть файл

@ -83,6 +83,9 @@ Maybe<gfx::ColorRange> ColorRangeFromBufferDescriptor(
Maybe<StereoMode> StereoModeFromBufferDescriptor(
const BufferDescriptor& aDescriptor);
Maybe<gfx::ChromaSubsampling> ChromaSubsamplingFromBufferDescriptor(
const BufferDescriptor& aDescriptor);
uint8_t* GetYChannel(uint8_t* aBuffer, const YCbCrDescriptor& aDescriptor);
uint8_t* GetCbChannel(uint8_t* aBuffer, const YCbCrDescriptor& aDescriptor);

Просмотреть файл

@ -24,6 +24,7 @@ CreateSourceSurfaceFromLockedMacIOSurface(MacIOSurface* aSurface) {
size_t bytesPerRow = aSurface->GetBytesPerRow();
size_t ioWidth = aSurface->GetDevicePixelWidth();
size_t ioHeight = aSurface->GetDevicePixelHeight();
IntSize size(int32_t(ioWidth), int32_t(ioHeight));
SurfaceFormat ioFormat = aSurface->GetFormat();
if ((ioFormat == SurfaceFormat::NV12 || ioFormat == SurfaceFormat::YUV422) &&
@ -37,8 +38,8 @@ CreateSourceSurfaceFromLockedMacIOSurface(MacIOSurface* aSurface) {
? SurfaceFormat::B8G8R8X8
: SurfaceFormat::B8G8R8A8;
RefPtr<DataSourceSurface> dataSurface = Factory::CreateDataSourceSurface(
IntSize::Truncate(ioWidth, ioHeight), format);
RefPtr<DataSourceSurface> dataSurface =
Factory::CreateDataSourceSurface(size, format);
if (NS_WARN_IF(!dataSurface)) {
return nullptr;
}
@ -77,23 +78,20 @@ CreateSourceSurfaceFromLockedMacIOSurface(MacIOSurface* aSurface) {
PlanarYCbCrData data;
data.mYChannel = (uint8_t*)aSurface->GetBaseAddressOfPlane(0);
data.mYStride = aSurface->GetBytesPerRow(0);
data.mYSize = IntSize::Truncate(ioWidth, ioHeight);
data.mCbChannel = cbPlane.get();
data.mCrChannel = crPlane.get();
data.mCbCrStride = cbCrWidth;
data.mCbCrSize = IntSize::Truncate(cbCrWidth, cbCrHeight);
data.mPicSize = data.mYSize;
data.mPictureRect = IntRect(IntPoint(0, 0), size);
data.mYUVColorSpace = aSurface->GetYUVColorSpace();
data.mColorRange = aSurface->IsFullRange() ? gfx::ColorRange::FULL
: gfx::ColorRange::LIMITED;
data.mChromaSubsampling = ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
ConvertYCbCrToRGB(data, SurfaceFormat::B8G8R8X8,
IntSize::Truncate(ioWidth, ioHeight), mappedSurface.mData,
ConvertYCbCrToRGB(data, SurfaceFormat::B8G8R8X8, size, mappedSurface.mData,
mappedSurface.mStride);
} else if (ioFormat == SurfaceFormat::YUV422) {
if (ioWidth == ALIGNED_32(ioWidth)) {
// Optimization when width is aligned to 32.
IntSize size = IntSize::Truncate(ioWidth, ioHeight);
libyuv::ConvertToARGB((uint8_t*)aSurface->GetBaseAddress(),
0 /* not used */, mappedSurface.mData,
mappedSurface.mStride, 0, 0, size.width,
@ -144,18 +142,16 @@ CreateSourceSurfaceFromLockedMacIOSurface(MacIOSurface* aSurface) {
PlanarYCbCrData data;
data.mYChannel = ALIGNEDPTR_32(yPlane.get());
data.mYStride = cbCrStride * 2;
data.mYSize = IntSize::Truncate(ioWidth, ioHeight);
data.mCbChannel = ALIGNEDPTR_32(cbPlane.get());
data.mCrChannel = ALIGNEDPTR_32(crPlane.get());
data.mCbCrStride = cbCrStride;
data.mCbCrSize = IntSize::Truncate(cbCrWidth, cbCrHeight);
data.mPicSize = data.mYSize;
data.mPictureRect = IntRect(IntPoint(0, 0), size);
data.mYUVColorSpace = aSurface->GetYUVColorSpace();
data.mColorRange = aSurface->IsFullRange() ? gfx::ColorRange::FULL
: gfx::ColorRange::LIMITED;
data.mChromaSubsampling = ChromaSubsampling::HALF_WIDTH;
ConvertYCbCrToRGB(data, SurfaceFormat::B8G8R8X8,
IntSize::Truncate(ioWidth, ioHeight),
ConvertYCbCrToRGB(data, SurfaceFormat::B8G8R8X8, size,
mappedSurface.mData, mappedSurface.mStride);
}
} else {

Просмотреть файл

@ -46,37 +46,38 @@ bool MacIOSurfaceImage::SetData(ImageContainer* aContainer,
return false;
}
if (aData.mCbCrSize.width * 2 != aData.mYSize.width) {
return false;
}
// We can only support 4:2:2 and 4:2:0 formats currently.
if (aData.mCbCrSize.height != aData.mYSize.height &&
aData.mCbCrSize.height * 2 != aData.mYSize.height) {
return false;
switch (aData.mChromaSubsampling) {
case ChromaSubsampling::HALF_WIDTH:
case ChromaSubsampling::HALF_WIDTH_AND_HEIGHT:
break;
default:
return false;
}
RefPtr<MacIOSurfaceRecycleAllocator> allocator =
aContainer->GetMacIOSurfaceRecycleAllocator();
auto ySize = aData.YDataSize();
auto cbcrSize = aData.CbCrDataSize();
RefPtr<MacIOSurface> surf = allocator->Allocate(
aData.mYSize, aData.mCbCrSize, aData.mYUVColorSpace, aData.mColorRange);
ySize, cbcrSize, aData.mYUVColorSpace, aData.mColorRange);
surf->Lock(false);
if (surf->GetFormat() == SurfaceFormat::YUV422) {
// If the CbCrSize's height is half of the YSize's height, then we'll
// need to duplicate the CbCr data on every second row.
size_t heightScale = aData.mYSize.height / aData.mCbCrSize.height;
size_t heightScale = ySize.height / cbcrSize.height;
// The underlying IOSurface has format
// kCVPixelFormatType_422YpCbCr8FullRange or
// kCVPixelFormatType_422YpCbCr8_yuvs, which uses a 4:2:2 Y`0 Cb Y`1 Cr
// layout. See CVPixelBuffer.h for the full list of format descriptions.
MOZ_ASSERT(aData.mYSize.height > 0);
MOZ_ASSERT(ySize.height > 0);
uint8_t* dst = (uint8_t*)surf->GetBaseAddressOfPlane(0);
size_t stride = surf->GetBytesPerRow(0);
for (size_t i = 0; i < (size_t)aData.mYSize.height; i++) {
for (size_t i = 0; i < (size_t)ySize.height; i++) {
// Compute the row addresses. If the input was 4:2:0, then
// we divide i by 2, so that each source row of CbCr maps to
// two dest rows.
@ -89,7 +90,7 @@ bool MacIOSurfaceImage::SetData(ImageContainer* aContainer,
// Iterate across the CbCr width (which we have guaranteed to be half of
// the surface width), and write two 16bit pixels each time.
for (size_t j = 0; j < (size_t)aData.mCbCrSize.width; j++) {
for (size_t j = 0; j < (size_t)cbcrSize.width; j++) {
*rowDst = *rowYSrc;
rowDst++;
rowYSrc++;
@ -108,25 +109,25 @@ bool MacIOSurfaceImage::SetData(ImageContainer* aContainer,
}
}
} else if (surf->GetFormat() == SurfaceFormat::NV12) {
MOZ_ASSERT(aData.mYSize.height > 0);
MOZ_ASSERT(ySize.height > 0);
uint8_t* dst = (uint8_t*)surf->GetBaseAddressOfPlane(0);
size_t stride = surf->GetBytesPerRow(0);
for (size_t i = 0; i < (size_t)aData.mYSize.height; i++) {
for (size_t i = 0; i < (size_t)ySize.height; i++) {
uint8_t* rowSrc = aData.mYChannel + aData.mYStride * i;
uint8_t* rowDst = dst + stride * i;
memcpy(rowDst, rowSrc, aData.mYSize.width);
memcpy(rowDst, rowSrc, ySize.width);
}
// Copy and interleave the Cb and Cr channels.
MOZ_ASSERT(aData.mCbCrSize.height > 0);
MOZ_ASSERT(cbcrSize.height > 0);
dst = (uint8_t*)surf->GetBaseAddressOfPlane(1);
stride = surf->GetBytesPerRow(1);
for (size_t i = 0; i < (size_t)aData.mCbCrSize.height; i++) {
for (size_t i = 0; i < (size_t)cbcrSize.height; i++) {
uint8_t* rowCbSrc = aData.mCbChannel + aData.mCbCrStride * i;
uint8_t* rowCrSrc = aData.mCrChannel + aData.mCbCrStride * i;
uint8_t* rowDst = dst + stride * i;
for (size_t j = 0; j < (size_t)aData.mCbCrSize.width; j++) {
for (size_t j = 0; j < (size_t)cbcrSize.width; j++) {
*rowDst = *rowCbSrc;
rowDst++;
rowCbSrc++;
@ -140,7 +141,7 @@ bool MacIOSurfaceImage::SetData(ImageContainer* aContainer,
surf->Unlock(false);
mSurface = surf;
mPictureRect = aData.GetPictureRect();
mPictureRect = aData.mPictureRect;
return true;
}

Просмотреть файл

@ -95,10 +95,10 @@ already_AddRefed<TextureClient> ImageClient::CreateTextureClientForImage(
return nullptr;
}
texture = TextureClient::CreateForYCbCr(
aKnowsCompositor, data->GetPictureRect(), data->mYSize, data->mYStride,
data->mCbCrSize, data->mCbCrStride, data->mStereoMode,
aKnowsCompositor, data->mPictureRect, data->YDataSize(), data->mYStride,
data->CbCrDataSize(), data->mCbCrStride, data->mStereoMode,
data->mColorDepth, data->mYUVColorSpace, data->mColorRange,
TextureFlags::DEFAULT);
data->mChromaSubsampling, TextureFlags::DEFAULT);
if (!texture) {
return nullptr;
}

Просмотреть файл

@ -1279,7 +1279,8 @@ already_AddRefed<TextureClient> TextureClient::CreateForYCbCr(
const gfx::IntSize& aYSize, uint32_t aYStride,
const gfx::IntSize& aCbCrSize, uint32_t aCbCrStride, StereoMode aStereoMode,
gfx::ColorDepth aColorDepth, gfx::YUVColorSpace aYUVColorSpace,
gfx::ColorRange aColorRange, TextureFlags aTextureFlags) {
gfx::ColorRange aColorRange, gfx::ChromaSubsampling aSubsampling,
TextureFlags aTextureFlags) {
if (!aAllocator || !aAllocator->GetLayersIPCActor()->IPCOpen()) {
return nullptr;
}
@ -1290,7 +1291,8 @@ already_AddRefed<TextureClient> TextureClient::CreateForYCbCr(
TextureData* data = BufferTextureData::CreateForYCbCr(
aAllocator, aDisplay, aYSize, aYStride, aCbCrSize, aCbCrStride,
aStereoMode, aColorDepth, aYUVColorSpace, aColorRange, aTextureFlags);
aStereoMode, aColorDepth, aYUVColorSpace, aColorRange, aSubsampling,
aTextureFlags);
if (!data) {
return nullptr;
}
@ -1703,17 +1705,17 @@ bool UpdateYCbCrTextureClient(TextureClient* aTexture,
BytesPerPixel(SurfaceFormatForColorDepth(aData.mColorDepth));
MappedYCbCrTextureData srcData;
srcData.y.data = aData.mYChannel;
srcData.y.size = aData.mYSize;
srcData.y.size = aData.YDataSize();
srcData.y.stride = aData.mYStride;
srcData.y.skip = aData.mYSkip;
srcData.y.bytesPerPixel = bytesPerPixel;
srcData.cb.data = aData.mCbChannel;
srcData.cb.size = aData.mCbCrSize;
srcData.cb.size = aData.CbCrDataSize();
srcData.cb.stride = aData.mCbCrStride;
srcData.cb.skip = aData.mCbSkip;
srcData.cb.bytesPerPixel = bytesPerPixel;
srcData.cr.data = aData.mCrChannel;
srcData.cr.size = aData.mCbCrSize;
srcData.cr.size = aData.CbCrDataSize();
srcData.cr.stride = aData.mCbCrStride;
srcData.cr.skip = aData.mCrSkip;
srcData.cr.bytesPerPixel = bytesPerPixel;

Просмотреть файл

@ -362,7 +362,7 @@ class TextureClient : public AtomicRefCountedWithFinalize<TextureClient> {
const gfx::IntSize& aCbCrSize, uint32_t aCbCrStride,
StereoMode aStereoMode, gfx::ColorDepth aColorDepth,
gfx::YUVColorSpace aYUVColorSpace, gfx::ColorRange aColorRange,
TextureFlags aTextureFlags);
gfx::ChromaSubsampling aSubsampling, TextureFlags aTextureFlags);
// Creates and allocates a TextureClient (can be accessed through raw
// pointers).

Просмотреть файл

@ -68,11 +68,14 @@ class MOZ_RAII DefaultTextureClientAllocationHelper
};
YCbCrTextureClientAllocationHelper::YCbCrTextureClientAllocationHelper(
const PlanarYCbCrData& aData, TextureFlags aTextureFlags)
: ITextureClientAllocationHelper(gfx::SurfaceFormat::YUV, aData.mYSize,
const PlanarYCbCrData& aData, const gfx::IntSize& aYSize,
const gfx::IntSize& aCbCrSize, TextureFlags aTextureFlags)
: ITextureClientAllocationHelper(gfx::SurfaceFormat::YUV, aYSize,
BackendSelector::Content, aTextureFlags,
ALLOC_DEFAULT),
mData(aData) {}
mData(aData),
mYSize(aYSize),
mCbCrSize(aCbCrSize) {}
bool YCbCrTextureClientAllocationHelper::IsCompatible(
TextureClient* aTextureClient) {
@ -82,11 +85,11 @@ bool YCbCrTextureClientAllocationHelper::IsCompatible(
aTextureClient->GetInternalData()->AsBufferTextureData();
if (!bufferData ||
!bufferData->GetPictureRect().IsEqualEdges(mData.GetPictureRect()) ||
!bufferData->GetPictureRect().IsEqualEdges(mData.mPictureRect) ||
bufferData->GetYSize().isNothing() ||
bufferData->GetYSize().ref() != mData.mYSize ||
bufferData->GetYSize().ref() != mYSize ||
bufferData->GetCbCrSize().isNothing() ||
bufferData->GetCbCrSize().ref() != mData.mCbCrSize ||
bufferData->GetCbCrSize().ref() != mCbCrSize ||
bufferData->GetYStride().isNothing() ||
bufferData->GetYStride().ref() != mData.mYStride ||
bufferData->GetCbCrStride().isNothing() ||
@ -96,7 +99,9 @@ bool YCbCrTextureClientAllocationHelper::IsCompatible(
bufferData->GetColorDepth().isNothing() ||
bufferData->GetColorDepth().ref() != mData.mColorDepth ||
bufferData->GetStereoMode().isNothing() ||
bufferData->GetStereoMode().ref() != mData.mStereoMode) {
bufferData->GetStereoMode().ref() != mData.mStereoMode ||
bufferData->GetChromaSubsampling().isNothing() ||
bufferData->GetChromaSubsampling().ref() != mData.mChromaSubsampling) {
return false;
}
return true;
@ -105,9 +110,10 @@ bool YCbCrTextureClientAllocationHelper::IsCompatible(
already_AddRefed<TextureClient> YCbCrTextureClientAllocationHelper::Allocate(
KnowsCompositor* aKnowsCompositor) {
return TextureClient::CreateForYCbCr(
aKnowsCompositor, mData.GetPictureRect(), mData.mYSize, mData.mYStride,
mData.mCbCrSize, mData.mCbCrStride, mData.mStereoMode, mData.mColorDepth,
mData.mYUVColorSpace, mData.mColorRange, mTextureFlags);
aKnowsCompositor, mData.mPictureRect, mYSize, mData.mYStride, mCbCrSize,
mData.mCbCrStride, mData.mStereoMode, mData.mColorDepth,
mData.mYUVColorSpace, mData.mColorRange, mData.mChromaSubsampling,
mTextureFlags);
}
TextureClientRecycleAllocator::TextureClientRecycleAllocator(

Просмотреть файл

@ -62,8 +62,15 @@ class MOZ_RAII YCbCrTextureClientAllocationHelper
: public ITextureClientAllocationHelper {
public:
YCbCrTextureClientAllocationHelper(const PlanarYCbCrData& aData,
const gfx::IntSize& aYSize,
const gfx::IntSize& aCbCrSize,
TextureFlags aTextureFlags);
YCbCrTextureClientAllocationHelper(const PlanarYCbCrData& aData,
TextureFlags aTextureFlags)
: YCbCrTextureClientAllocationHelper(
aData, aData.YDataSize(), aData.CbCrDataSize(), aTextureFlags) {}
bool IsCompatible(TextureClient* aTextureClient) override;
already_AddRefed<TextureClient> Allocate(
@ -71,6 +78,8 @@ class MOZ_RAII YCbCrTextureClientAllocationHelper
protected:
const PlanarYCbCrData& mData;
const gfx::IntSize mYSize;
const gfx::IntSize mCbCrSize;
};
/**

Просмотреть файл

@ -13,6 +13,7 @@ using mozilla::WindowsHandle from "mozilla/ipc/IPCTypes.h";
using mozilla::gfx::YUVColorSpace from "mozilla/gfx/Types.h";
using mozilla::gfx::ColorDepth from "mozilla/gfx/Types.h";
using mozilla::gfx::ColorRange from "mozilla/gfx/Types.h";
using mozilla::gfx::ChromaSubsampling from "mozilla/gfx/Types.h";
using mozilla::gfx::SurfaceFormat from "mozilla/gfx/Types.h";
using mozilla::gfx::IntRect from "mozilla/gfx/Rect.h";
using mozilla::gfx::IntSize from "mozilla/gfx/Point.h";
@ -134,6 +135,7 @@ namespace layers {
ColorDepth colorDepth;
YUVColorSpace yUVColorSpace;
ColorRange colorRange;
ChromaSubsampling chromaSubsampling;
};
[Comparable] union BufferDescriptor {

Просмотреть файл

@ -81,11 +81,11 @@ SharedPlanarYCbCrImage::GetAsSourceSurface() {
}
bool SharedPlanarYCbCrImage::CopyData(const PlanarYCbCrData& aData) {
// If mTextureClient has not already been allocated (through Allocate(aData))
// allocate it. This code path is slower than the one used when Allocate has
// been called since it will trigger a full copy.
PlanarYCbCrData data = aData;
if (!mTextureClient && !Allocate(data)) {
// If mTextureClient has not already been allocated by CreateEmptyBuffer,
// allocate it. This code path is slower than the one used when
// CreateEmptyBuffer has been called since it will trigger a full copy.
if (!mTextureClient &&
!CreateEmptyBuffer(aData, aData.YDataSize(), aData.CbCrDataSize())) {
return false;
}
@ -108,22 +108,19 @@ bool SharedPlanarYCbCrImage::AdoptData(const Data& aData) {
return false;
}
bool SharedPlanarYCbCrImage::CreateEmptyBuffer(const Data& aData) {
auto data = aData;
return Allocate(data);
}
bool SharedPlanarYCbCrImage::IsValid() const {
return mTextureClient && mTextureClient->IsValid();
}
bool SharedPlanarYCbCrImage::Allocate(PlanarYCbCrData& aData) {
bool SharedPlanarYCbCrImage::CreateEmptyBuffer(const PlanarYCbCrData& aData,
const gfx::IntSize& aYSize,
const gfx::IntSize& aCbCrSize) {
MOZ_ASSERT(!mTextureClient, "This image already has allocated data");
TextureFlags flags =
mCompositable ? mCompositable->GetTextureFlags() : TextureFlags::DEFAULT;
{
YCbCrTextureClientAllocationHelper helper(aData, flags);
YCbCrTextureClientAllocationHelper helper(aData, aYSize, aCbCrSize, flags);
mTextureClient = RecycleAllocator()->CreateOrRecycle(helper);
}
@ -143,22 +140,15 @@ bool SharedPlanarYCbCrImage::Allocate(PlanarYCbCrData& aData) {
MOZ_CRASH("GFX: Cannot lock or borrow mapped YCbCr");
}
aData.mYChannel = mapped.y.data;
aData.mCbChannel = mapped.cb.data;
aData.mCrChannel = mapped.cr.data;
// copy some of aData's values in mData (most of them)
mData.mYChannel = aData.mYChannel;
mData.mCbChannel = aData.mCbChannel;
mData.mCrChannel = aData.mCrChannel;
mData.mYSize = aData.mYSize;
mData.mCbCrSize = aData.mCbCrSize;
mData.mPicX = aData.mPicX;
mData.mPicY = aData.mPicY;
mData.mPicSize = aData.mPicSize;
mData.mYChannel = mapped.y.data;
mData.mCbChannel = mapped.cb.data;
mData.mCrChannel = mapped.cr.data;
mData.mPictureRect = aData.mPictureRect;
mData.mStereoMode = aData.mStereoMode;
mData.mYUVColorSpace = aData.mYUVColorSpace;
mData.mColorDepth = aData.mColorDepth;
mData.mChromaSubsampling = aData.mChromaSubsampling;
// those members are not always equal to aData's, due to potentially different
// packing.
mData.mYSkip = 0;
@ -171,9 +161,9 @@ bool SharedPlanarYCbCrImage::Allocate(PlanarYCbCrData& aData) {
// will try to manage this memory without knowing it belongs to a
// shmem.
mBufferSize = ImageDataSerializer::ComputeYCbCrBufferSize(
mData.mYSize, mData.mYStride, mData.mCbCrSize, mData.mCbCrStride);
mSize = mData.mPicSize;
mOrigin = gfx::IntPoint(aData.mPicX, aData.mPicY);
aYSize, mData.mYStride, aCbCrSize, mData.mCbCrStride);
mSize = mData.mPictureRect.Size();
mOrigin = mData.mPictureRect.TopLeft();
mTextureClient->Unlock();

Просмотреть файл

@ -38,7 +38,8 @@ class SharedPlanarYCbCrImage : public PlanarYCbCrImage {
already_AddRefed<gfx::SourceSurface> GetAsSourceSurface() override;
bool CopyData(const PlanarYCbCrData& aData) override;
bool AdoptData(const Data& aData) override;
bool CreateEmptyBuffer(const Data& aData) override;
bool CreateEmptyBuffer(const Data& aData, const gfx::IntSize& aYSize,
const gfx::IntSize& aCbCrSize) override;
bool IsValid() const override;
@ -51,8 +52,6 @@ class SharedPlanarYCbCrImage : public PlanarYCbCrImage {
TextureClientRecycleAllocator* RecycleAllocator();
private:
bool Allocate(PlanarYCbCrData& aData);
RefPtr<TextureClient> mTextureClient;
RefPtr<ImageClient> mCompositable;
RefPtr<TextureClientRecycleAllocator> mRecycleAllocator;

Просмотреть файл

@ -169,8 +169,8 @@ void TestTextureClientYCbCr(TextureClient* client, PlanarYCbCrData& ycbcrData) {
auto bufferDesc = descriptor.get_SurfaceDescriptorBuffer();
ASSERT_EQ(bufferDesc.desc().type(), BufferDescriptor::TYCbCrDescriptor);
auto ycbcrDesc = bufferDesc.desc().get_YCbCrDescriptor();
ASSERT_EQ(ycbcrDesc.ySize(), ycbcrData.mYSize);
ASSERT_EQ(ycbcrDesc.cbCrSize(), ycbcrData.mCbCrSize);
ASSERT_EQ(ycbcrDesc.ySize(), ycbcrData.YDataSize());
ASSERT_EQ(ycbcrDesc.cbCrSize(), ycbcrData.CbCrDataSize());
ASSERT_EQ(ycbcrDesc.stereoMode(), ycbcrData.mStereoMode);
// host deserialization
@ -234,20 +234,17 @@ TEST(Layers, TextureYCbCrSerialization)
clientData.mYChannel = ySurface->Data();
clientData.mCbChannel = cbSurface->Data();
clientData.mCrChannel = crSurface->Data();
clientData.mYSize = ySurface->GetSize();
clientData.mPicSize = ySurface->GetSize();
clientData.mCbCrSize = cbSurface->GetSize();
clientData.mPictureRect = IntRect(IntPoint(0, 0), ySurface->GetSize());
clientData.mYStride = ySurface->Stride();
clientData.mCbCrStride = cbSurface->Stride();
clientData.mStereoMode = StereoMode::MONO;
clientData.mYUVColorSpace = YUVColorSpace::BT601;
clientData.mColorDepth = ColorDepth::COLOR_8;
clientData.mChromaSubsampling = ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
clientData.mYSkip = 0;
clientData.mCbSkip = 0;
clientData.mCrSkip = 0;
clientData.mCrSkip = 0;
clientData.mPicX = 0;
clientData.mPicX = 0;
uint32_t namespaceId = 1;
ImageBridgeChild::InitSameProcess(namespaceId);
@ -271,10 +268,11 @@ TEST(Layers, TextureYCbCrSerialization)
}
RefPtr<TextureClient> client = TextureClient::CreateForYCbCr(
imageBridge, clientData.GetPictureRect(), clientData.mYSize,
clientData.mYStride, clientData.mCbCrSize, clientData.mCbCrStride,
imageBridge, clientData.mPictureRect, clientData.YDataSize(),
clientData.mYStride, clientData.CbCrDataSize(), clientData.mCbCrStride,
StereoMode::MONO, ColorDepth::COLOR_8, YUVColorSpace::BT601,
ColorRange::LIMITED, TextureFlags::DEALLOCATE_CLIENT);
ColorRange::LIMITED, clientData.mChromaSubsampling,
TextureFlags::DEALLOCATE_CLIENT);
TestTextureClientYCbCr(client, clientData);

Просмотреть файл

@ -49,26 +49,24 @@ static already_AddRefed<TextureClient> CreateYCbCrTextureClientWithBackend(
clientData.mYChannel = ySurface->Data();
clientData.mCbChannel = cbSurface->Data();
clientData.mCrChannel = crSurface->Data();
clientData.mYSize = ySurface->GetSize();
clientData.mPicSize = ySurface->GetSize();
clientData.mCbCrSize = cbSurface->GetSize();
clientData.mPictureRect = IntRect(IntPoint(0, 0), ySurface->GetSize());
clientData.mYStride = ySurface->Stride();
clientData.mCbCrStride = cbSurface->Stride();
clientData.mStereoMode = StereoMode::MONO;
clientData.mChromaSubsampling = ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
clientData.mYSkip = 0;
clientData.mCbSkip = 0;
clientData.mCrSkip = 0;
clientData.mCrSkip = 0;
clientData.mPicX = 0;
clientData.mPicX = 0;
// Create YCbCrTexture for basic backend.
if (aLayersBackend == LayersBackend::LAYERS_BASIC) {
return TextureClient::CreateForYCbCr(
nullptr, clientData.GetPictureRect(), clientData.mYSize,
clientData.mYStride, clientData.mCbCrSize, clientData.mCbCrStride,
nullptr, clientData.mPictureRect, clientData.YDataSize(),
clientData.mYStride, clientData.CbCrDataSize(), clientData.mCbCrStride,
StereoMode::MONO, gfx::ColorDepth::COLOR_8, gfx::YUVColorSpace::BT601,
gfx::ColorRange::LIMITED, TextureFlags::DEALLOCATE_CLIENT);
gfx::ColorRange::LIMITED, clientData.mChromaSubsampling,
TextureFlags::DEALLOCATE_CLIENT);
}
if (data) {

Просмотреть файл

@ -17,32 +17,16 @@ namespace gfx {
// clang-format off
IntSize GetCroppedCbCrSize(const IntSize& aYSize,
const IntSize& aCbCrSize,
const IntSize& aDisplaySize) {
// The supplied ySize and cbcrSize are dimensions that may be padded for
// alignment. display holds the intended cropped display size of the data.
// The ySize can simply be limited by the display size. The cbcrSize must
// be cropped by checking if the uncropped size is approximately half, and
// then halving the cropped ySize since the uncropped sizes may be padded
// inconsistently.
IntSize croppedCbCrSize = Min(aDisplaySize, aCbCrSize);
if (aCbCrSize.height < aYSize.height &&
aCbCrSize.height >= aYSize.height / 2) {
croppedCbCrSize.width = (aDisplaySize.width + 1) / 2;
croppedCbCrSize.height = (aDisplaySize.height + 1) / 2;
} else if (aCbCrSize.width < aYSize.width &&
aCbCrSize.width >= aYSize.width / 2) {
croppedCbCrSize.width = (aDisplaySize.width + 1) / 2;
}
return croppedCbCrSize;
}
static YUVType GetYUVType(const layers::PlanarYCbCrData& aData) {
IntSize croppedCbCrSize =
GetCroppedCbCrSize(aData.mYSize, aData.mCbCrSize, aData.mPicSize);
return TypeFromSize(aData.mPicSize.width, aData.mPicSize.height,
croppedCbCrSize.width, croppedCbCrSize.height);
switch (aData.mChromaSubsampling) {
case ChromaSubsampling::FULL:
return aData.mCbCrStride > 0 ? YV24 : Y8;
case ChromaSubsampling::HALF_WIDTH:
return YV16;
case ChromaSubsampling::HALF_WIDTH_AND_HEIGHT:
return YV12;
}
MOZ_CRASH("Unknown chroma subsampling");
}
void
@ -55,23 +39,23 @@ GetYCbCrToRGBDestFormatAndSize(const layers::PlanarYCbCrData& aData,
// 'prescale' is true if the scaling is to be done as part of the
// YCbCr to RGB conversion rather than on the RGB data when rendered.
bool prescale = aSuggestedSize.width > 0 && aSuggestedSize.height > 0 &&
aSuggestedSize != aData.mPicSize;
aSuggestedSize != aData.mPictureRect.Size();
if (aSuggestedFormat == SurfaceFormat::R5G6B5_UINT16) {
#if defined(HAVE_YCBCR_TO_RGB565)
if (prescale &&
!IsScaleYCbCrToRGB565Fast(aData.mPicX,
aData.mPicY,
aData.mPicSize.width,
aData.mPicSize.height,
!IsScaleYCbCrToRGB565Fast(aData.mPictureRect.x,
aData.mPictureRect.y,
aData.mPictureRect.width,
aData.mPictureRect.height,
aSuggestedSize.width,
aSuggestedSize.height,
yuvtype,
FILTER_BILINEAR) &&
IsConvertYCbCrToRGB565Fast(aData.mPicX,
aData.mPicY,
aData.mPicSize.width,
aData.mPicSize.height,
IsConvertYCbCrToRGB565Fast(aData.mPictureRect.x,
aData.mPictureRect.y,
aData.mPictureRect.width,
aData.mPictureRect.height,
yuvtype)) {
prescale = false;
}
@ -87,11 +71,11 @@ GetYCbCrToRGBDestFormatAndSize(const layers::PlanarYCbCrData& aData,
if (aSuggestedFormat == SurfaceFormat::B8G8R8X8) {
/* ScaleYCbCrToRGB32 does not support a picture offset, nor 4:4:4 data.
See bugs 639415 and 640073. */
if (aData.mPicX != 0 || aData.mPicY != 0 || yuvtype == YV24)
if (aData.mPictureRect.TopLeft() != IntPoint(0, 0) || yuvtype == YV24)
prescale = false;
}
if (!prescale) {
aSuggestedSize = aData.mPicSize;
aSuggestedSize = aData.mPictureRect.Size();
}
}
@ -145,27 +129,26 @@ ConvertYCbCrToRGBInternal(const layers::PlanarYCbCrData& aData,
if (aData.mColorDepth != ColorDepth::COLOR_8) {
// Convert to 8 bits data first.
dstData.mPicSize = aData.mPicSize;
dstData.mPicX = aData.mPicX;
dstData.mPicY = aData.mPicY;
dstData.mYSize = aData.mYSize;
dstData.mPictureRect = aData.mPictureRect;
// We align the destination stride to 32 bytes, so that libyuv can use
// SSE optimised code.
dstData.mYStride = (aData.mYSize.width + 31) & ~31;
dstData.mCbCrSize = aData.mCbCrSize;
dstData.mCbCrStride = (aData.mCbCrSize.width + 31) & ~31;
auto ySize = aData.YDataSize();
auto cbcrSize = aData.CbCrDataSize();
dstData.mYStride = (ySize.width + 31) & ~31;
dstData.mCbCrStride = (cbcrSize.width + 31) & ~31;
dstData.mYUVColorSpace = aData.mYUVColorSpace;
dstData.mColorDepth = ColorDepth::COLOR_8;
dstData.mColorRange = aData.mColorRange;
dstData.mChromaSubsampling = aData.mChromaSubsampling;
size_t ySize = GetAlignedStride<1>(dstData.mYStride, aData.mYSize.height);
size_t cbcrSize =
GetAlignedStride<1>(dstData.mCbCrStride, aData.mCbCrSize.height);
if (ySize == 0) {
MOZ_DIAGNOSTIC_ASSERT(cbcrSize == 0, "CbCr without Y makes no sense");
size_t yMemorySize = GetAlignedStride<1>(dstData.mYStride, ySize.height);
size_t cbcrMemorySize =
GetAlignedStride<1>(dstData.mCbCrStride, cbcrSize.height);
if (yMemorySize == 0) {
MOZ_DIAGNOSTIC_ASSERT(cbcrMemorySize == 0, "CbCr without Y makes no sense");
return;
}
yChannel = MakeUnique<uint8_t[]>(ySize);
yChannel = MakeUnique<uint8_t[]>(yMemorySize);
dstData.mYChannel = yChannel.get();
@ -175,13 +158,13 @@ ConvertYCbCrToRGBInternal(const layers::PlanarYCbCrData& aData,
dstData.mYStride,
reinterpret_cast<uint16_t*>(aData.mYChannel),
aData.mYStride / 2,
aData.mYSize.width,
aData.mYSize.height,
ySize.width,
ySize.height,
bitDepth);
if (cbcrSize) {
cbChannel = MakeUnique<uint8_t[]>(cbcrSize);
crChannel = MakeUnique<uint8_t[]>(cbcrSize);
if (cbcrMemorySize) {
cbChannel = MakeUnique<uint8_t[]>(cbcrMemorySize);
crChannel = MakeUnique<uint8_t[]>(cbcrMemorySize);
dstData.mCbChannel = cbChannel.get();
dstData.mCrChannel = crChannel.get();
@ -190,32 +173,32 @@ ConvertYCbCrToRGBInternal(const layers::PlanarYCbCrData& aData,
dstData.mCbCrStride,
reinterpret_cast<uint16_t*>(aData.mCbChannel),
aData.mCbCrStride / 2,
aData.mCbCrSize.width,
aData.mCbCrSize.height,
cbcrSize.width,
cbcrSize.height,
bitDepth);
ConvertYCbCr16to8Line(dstData.mCrChannel,
dstData.mCbCrStride,
reinterpret_cast<uint16_t*>(aData.mCrChannel),
aData.mCbCrStride / 2,
aData.mCbCrSize.width,
aData.mCbCrSize.height,
cbcrSize.width,
cbcrSize.height,
bitDepth);
}
}
// Convert from YCbCr to RGB now, scaling the image if needed.
if (aDestSize != srcData.mPicSize) {
if (aDestSize != srcData.mPictureRect.Size()) {
#if defined(HAVE_YCBCR_TO_RGB565)
if (aDestFormat == SurfaceFormat::R5G6B5_UINT16) {
ScaleYCbCrToRGB565(srcData.mYChannel,
srcData.mCbChannel,
srcData.mCrChannel,
aDestBuffer,
srcData.mPicX,
srcData.mPicY,
srcData.mPicSize.width,
srcData.mPicSize.height,
srcData.mPictureRect.x,
srcData.mPictureRect.y,
srcData.mPictureRect.width,
srcData.mPictureRect.height,
aDestSize.width,
aDestSize.height,
srcData.mYStride,
@ -229,8 +212,8 @@ ConvertYCbCrToRGBInternal(const layers::PlanarYCbCrData& aData,
srcData.mCbChannel,
srcData.mCrChannel,
aDestBuffer,
srcData.mPicSize.width,
srcData.mPicSize.height,
srcData.mPictureRect.width,
srcData.mPictureRect.height,
aDestSize.width,
aDestSize.height,
srcData.mYStride,
@ -246,10 +229,10 @@ ConvertYCbCrToRGBInternal(const layers::PlanarYCbCrData& aData,
srcData.mCbChannel,
srcData.mCrChannel,
aDestBuffer,
srcData.mPicX,
srcData.mPicY,
srcData.mPicSize.width,
srcData.mPicSize.height,
srcData.mPictureRect.x,
srcData.mPictureRect.y,
srcData.mPictureRect.width,
srcData.mPictureRect.height,
srcData.mYStride,
srcData.mCbCrStride,
aStride,
@ -260,10 +243,10 @@ ConvertYCbCrToRGBInternal(const layers::PlanarYCbCrData& aData,
srcData.mCbChannel,
srcData.mCrChannel,
aDestBuffer,
srcData.mPicX,
srcData.mPicY,
srcData.mPicSize.width,
srcData.mPicSize.height,
srcData.mPictureRect.x,
srcData.mPictureRect.y,
srcData.mPictureRect.width,
srcData.mPictureRect.height,
srcData.mYStride,
srcData.mCbCrStride,
aStride,
@ -281,10 +264,11 @@ void ConvertYCbCrToRGB(const layers::PlanarYCbCrData& aData,
aStride);
#if MOZ_BIG_ENDIAN()
// libyuv makes endian-correct result, which needs to be swapped to BGRX
if (aDestFormat != SurfaceFormat::R5G6B5_UINT16)
if (aDestFormat != SurfaceFormat::R5G6B5_UINT16) {
gfx::SwizzleData(aDestBuffer, aStride, gfx::SurfaceFormat::X8R8G8B8,
aDestBuffer, aStride, gfx::SurfaceFormat::B8G8R8X8,
aData.mPicSize);
aDestSize);
}
#endif
}
@ -316,7 +300,7 @@ void ConvertYCbCrAToARGB(const layers::PlanarYCbCrData& aYCbCr,
int32_t aStride, PremultFunc premultiplyAlphaOp) {
// libyuv makes endian-correct result, so the format needs to be B8G8R8A8.
MOZ_ASSERT(aDestFormat == SurfaceFormat::B8G8R8A8);
MOZ_ASSERT(aAlpha.mSize == aYCbCr.mYSize);
MOZ_ASSERT(aAlpha.mSize == aYCbCr.YDataSize());
// libyuv has libyuv::I420AlphaToARGB, but lacks support for 422 and 444.
// Until that's added, we'll rely on our own code to handle this more
@ -355,12 +339,12 @@ void ConvertYCbCrAToARGB(const layers::PlanarYCbCrData& aYCbCr,
MOZ_ASSERT(alphaChannel8bpp);
FillAlphaToRGBA(alphaChannel8bpp, alphaStride8bpp, aDestBuffer,
aYCbCr.mPicSize.width, aYCbCr.mPicSize.height, aDestFormat);
aYCbCr.mPictureRect.width, aYCbCr.mPictureRect.height, aDestFormat);
if (premultiplyAlphaOp) {
DebugOnly<int> err =
premultiplyAlphaOp(aDestBuffer, aStride, aDestBuffer, aStride,
aYCbCr.mPicSize.width, aYCbCr.mPicSize.height);
aYCbCr.mPictureRect.width, aYCbCr.mPictureRect.height);
MOZ_ASSERT(!err);
}
@ -368,7 +352,7 @@ void ConvertYCbCrAToARGB(const layers::PlanarYCbCrData& aYCbCr,
// libyuv makes endian-correct result, which needs to be swapped to BGRA
gfx::SwizzleData(aDestBuffer, aStride, gfx::SurfaceFormat::A8R8G8B8,
aDestBuffer, aStride, gfx::SurfaceFormat::B8G8R8A8,
aYCbCr.mPicSize);
aYCbCr.mPictureRect.Size());
#endif
}

Просмотреть файл

@ -12,9 +12,6 @@
namespace mozilla {
namespace gfx {
IntSize GetCroppedCbCrSize(const IntSize& aYSize, const IntSize& aCbCrSize,
const IntSize& aDisplaySize);
void
GetYCbCrToRGBDestFormatAndSize(const layers::PlanarYCbCrData& aData,
SurfaceFormat& aSuggestedFormat,

Просмотреть файл

@ -38,28 +38,6 @@ const int kFractionMask = ((1 << kFractionBits) - 1);
// clang-format off
YUVType TypeFromSize(int ywidth,
int yheight,
int cbcrwidth,
int cbcrheight)
{
if (ywidth == cbcrwidth && yheight == cbcrheight) {
return YV24;
}
else if ((ywidth + 1) / 2 == cbcrwidth && yheight == cbcrheight) {
return YV16;
}
else if ((ywidth + 1) / 2 == cbcrwidth && (yheight + 1) / 2 == cbcrheight) {
return YV12;
}
else if (cbcrwidth == 0 && cbcrheight == 0) {
return Y8;
}
else {
MOZ_CRASH("Can't determine YUV type from size");
}
}
libyuv::FourCC FourCCFromYUVType(YUVType aYUVType) {
switch (aYUVType) {
case YV24: return libyuv::FOURCC_I444;

Просмотреть файл

@ -44,8 +44,6 @@ enum ScaleFilter {
FILTER_BILINEAR = 3 // Bilinear filter.
};
YUVType TypeFromSize(int ywidth, int yheight, int cbcrwidth, int cbcrheight);
// Convert a frame of YUV to 32 bit ARGB.
// Pass in YV16/YV12 depending on source format
void ConvertYCbCrToRGB32(const uint8* yplane,

Просмотреть файл

@ -960,7 +960,6 @@ AVIFDecodedData Dav1dDecoder::Dav1dPictureToDecodedData(
data.mYChannel = static_cast<uint8_t*>(aPicture->data[0]);
data.mYStride = aPicture->stride[0];
data.mYSize = gfx::IntSize(aPicture->p.w, aPicture->p.h);
data.mYSkip = aPicture->stride[0] - aPicture->p.w;
data.mCbChannel = static_cast<uint8_t*>(aPicture->data[1]);
data.mCrChannel = static_cast<uint8_t*>(aPicture->data[2]);
@ -968,17 +967,14 @@ AVIFDecodedData Dav1dDecoder::Dav1dPictureToDecodedData(
switch (aPicture->p.layout) {
case DAV1D_PIXEL_LAYOUT_I400: // Monochrome, so no Cb or Cr channels
data.mCbCrSize = gfx::IntSize(0, 0);
break;
case DAV1D_PIXEL_LAYOUT_I420:
data.mCbCrSize =
gfx::IntSize((aPicture->p.w + 1) / 2, (aPicture->p.h + 1) / 2);
data.mChromaSubsampling = ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
break;
case DAV1D_PIXEL_LAYOUT_I422:
data.mCbCrSize = gfx::IntSize((aPicture->p.w + 1) / 2, aPicture->p.h);
data.mChromaSubsampling = ChromaSubsampling::HALF_WIDTH;
break;
case DAV1D_PIXEL_LAYOUT_I444:
data.mCbCrSize = gfx::IntSize(aPicture->p.w, aPicture->p.h);
break;
default:
MOZ_ASSERT_UNREACHABLE("Unknown pixel layout");
@ -986,9 +982,7 @@ AVIFDecodedData Dav1dDecoder::Dav1dPictureToDecodedData(
data.mCbSkip = aPicture->stride[1] - aPicture->p.w;
data.mCrSkip = aPicture->stride[1] - aPicture->p.w;
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = data.mYSize;
data.mPictureRect = IntRect(0, 0, aPicture->p.w, aPicture->p.h);
data.mStereoMode = StereoMode::MONO;
data.mColorDepth = ColorDepthForBitDepth(aPicture->p.bpc);
@ -1059,25 +1053,27 @@ AVIFDecodedData AOMDecoder::AOMImageToToDecodedData(
data.mYChannel = aImage->planes[AOM_PLANE_Y];
data.mYStride = aImage->stride[AOM_PLANE_Y];
data.mYSize = gfx::IntSize(aom_img_plane_width(aImage, AOM_PLANE_Y),
aom_img_plane_height(aImage, AOM_PLANE_Y));
data.mYSkip =
aImage->stride[AOM_PLANE_Y] - aom_img_plane_width(aImage, AOM_PLANE_Y);
data.mCbChannel = aImage->planes[AOM_PLANE_U];
data.mCrChannel = aImage->planes[AOM_PLANE_V];
data.mCbCrStride = aImage->stride[AOM_PLANE_U];
data.mCbCrSize = gfx::IntSize(aom_img_plane_width(aImage, AOM_PLANE_U),
aom_img_plane_height(aImage, AOM_PLANE_U));
data.mCbSkip =
aImage->stride[AOM_PLANE_U] - aom_img_plane_width(aImage, AOM_PLANE_U);
data.mCrSkip =
aImage->stride[AOM_PLANE_V] - aom_img_plane_width(aImage, AOM_PLANE_V);
data.mPicX = 0;
data.mPicY = 0;
data.mPicSize = gfx::IntSize(aImage->d_w, aImage->d_h);
data.mPictureRect = gfx::IntRect(0, 0, aImage->d_w, aImage->d_h);
data.mStereoMode = StereoMode::MONO;
data.mColorDepth = ColorDepthForBitDepth(aImage->bit_depth);
if (aImage->x_chroma_shift == 1 && aImage->y_chroma_shift == 1) {
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
} else if (aImage->x_chroma_shift == 1 && aImage->y_chroma_shift == 0) {
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
} else if (aImage->x_chroma_shift != 0 || aImage->y_chroma_shift != 0) {
MOZ_ASSERT_UNREACHABLE("unexpected chroma shifts");
}
MOZ_ASSERT(aImage->bit_depth == BitDepthForColorDepth(data.mColorDepth));
auto av1ColourPrimaries = static_cast<CICP::ColourPrimaries>(aImage->cp);
@ -1382,24 +1378,25 @@ nsAVIFDecoder::DecodeResult nsAVIFDecoder::Decode(
static_cast<uint8_t>(decodedData.mColorRange)));
// Technically it's valid but we don't handle it now (Bug 1682318).
if (decodedData.mAlpha && decodedData.mAlpha->mSize != decodedData.mYSize) {
if (decodedData.mAlpha &&
decodedData.mAlpha->mSize != decodedData.YDataSize()) {
return AsVariant(NonDecoderResult::AlphaYSizeMismatch);
}
if (parsedImageSize.isNothing()) {
MOZ_LOG(sAVIFLog, LogLevel::Error,
("[this=%p] Using decoded image size: %d x %d", this,
decodedData.mPicSize.width, decodedData.mPicSize.height));
PostSize(decodedData.mPicSize.width, decodedData.mPicSize.height,
decodedData.mPictureRect.width, decodedData.mPictureRect.height));
PostSize(decodedData.mPictureRect.width, decodedData.mPictureRect.height,
orientation);
AccumulateCategorical(LABELS_AVIF_ISPE::absent);
} else if (decodedData.mPicSize.width != parsedImageSize->width ||
decodedData.mPicSize.height != parsedImageSize->height) {
} else if (decodedData.mPictureRect.width != parsedImageSize->width ||
decodedData.mPictureRect.height != parsedImageSize->height) {
MOZ_LOG(sAVIFLog, LogLevel::Error,
("[this=%p] Metadata image size doesn't match decoded image size: "
"(%d x %d) != (%d x %d)",
this, parsedImageSize->width, parsedImageSize->height,
decodedData.mPicSize.width, decodedData.mPicSize.height));
decodedData.mPictureRect.width, decodedData.mPictureRect.height));
AccumulateCategorical(LABELS_AVIF_ISPE::bitstream_mismatch);
return AsVariant(NonDecoderResult::MetadataImageSizeMismatch);
} else {
@ -1437,7 +1434,7 @@ nsAVIFDecoder::DecodeResult nsAVIFDecoder::Decode(
AccumulateCategorical(LABELS_AVIF_ALPHA::absent);
}
IntSize rgbSize = decodedData.mPicSize;
IntSize rgbSize = decodedData.mPictureRect.Size();
MOZ_ASSERT(
rgbSize ==
GetImageMetadata().GetOrientation().ToUnoriented(Size()).ToUnknownSize());