Bug 1376873 - WebrtcMediaDataDecoderCodec updates; r=pehrsons

This makes the ImageBuffer class implement VideoFrameBuffer class and makes
it available in MediaPipeline. To avoid exposing MediaPipeline to a lot of
internal details of WebrtcMediaDataDecoder, ImageBuffer is moved to its
own header file.

We're required to implement a ToI420() method. I've re-used the current
implementation of NativeToI420Buffer, which requires more development in order
to work properly as the Image class only supports RGB readback.

Differential Revision: https://phabricator.services.mozilla.com/D7435

--HG--
extra : rebase_source : 19ec409510f06a9fa7cab15c1f16c9a7bad0b433
This commit is contained in:
Dan Minor 2018-08-21 13:40:45 -04:00
Родитель fa42f190ae
Коммит bd4f2b743a
5 изменённых файлов: 85 добавлений и 46 удалений

Просмотреть файл

@ -0,0 +1,75 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef WebrtcImageBuffer_h__
#define WebrtcImageBuffer_h__
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/rtc_base/keep_ref_until_done.h"
namespace mozilla {
namespace layers {
class Image;
}
// Wraps a layers::Image as a webrtc::VideoFrameBuffer so decoded frames can
// travel through the WebRTC pipeline as "native" buffers and be unwrapped
// again on the rendering side via GetNativeImage().
class ImageBuffer : public webrtc::VideoFrameBuffer
{
public:
  // Takes ownership of aImage; the wrapped image is immutable afterwards.
  explicit ImageBuffer(RefPtr<layers::Image>&& aImage)
    : mImage(std::move(aImage))
  {
  }

  // Identifies this buffer as a native wrapper rather than a raw I420 plane
  // set; consumers must call GetNativeImage() or ToI420() accordingly.
  Type type() const override { return Type::kNative; }

  int width() const override { return mImage->GetSize().width; }

  int height() const override { return mImage->GetSize().height; }

  // Exposes the wrapped image's Y/Cb/Cr planes as an I420 buffer without
  // copying. Returns nullptr when the image is not planar YCbCr, since
  // Image currently only offers an RGB readback path.
  rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override
  {
    RefPtr<layers::PlanarYCbCrImage> yuv = mImage->AsPlanarYCbCrImage();
    MOZ_ASSERT(yuv);
    if (!yuv) {
      // TODO. YUV420 ReadBack, Image only provides a RGB readback.
      return nullptr;
    }
    const layers::PlanarYCbCrData* planes = yuv->GetData();
    // KeepRefUntilDone holds a reference on the image for as long as the
    // wrapper is alive, so the plane pointers stay valid.
    return new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
      planes->mPicSize.width,
      planes->mPicSize.height,
      planes->mYChannel,
      planes->mYStride,
      planes->mCbChannel,
      planes->mCbCrStride,
      planes->mCrChannel,
      planes->mCbCrStride,
      rtc::KeepRefUntilDone(yuv.get()));
  }

  // Hands back the wrapped image for consumers that can render it directly.
  RefPtr<layers::Image> GetNativeImage() const { return mImage; }

private:
  const RefPtr<layers::Image> mImage;
};
} // namespace mozilla
#endif // WebrtcImageBuffer_h__

Просмотреть файл

@ -9,7 +9,7 @@
#include "VideoUtils.h" #include "VideoUtils.h"
#include "mozilla/media/MediaUtils.h" #include "mozilla/media/MediaUtils.h"
#include "mozilla/layers/ImageBridgeChild.h" #include "mozilla/layers/ImageBridgeChild.h"
#include "webrtc/base/keep_ref_until_done.h" #include "webrtc/rtc_base/keep_ref_until_done.h"
namespace mozilla { namespace mozilla {
@ -172,36 +172,4 @@ WebrtcMediaDataDecoder::OnTaskQueue() const
return OwnerThread()->IsCurrentThreadIn(); return OwnerThread()->IsCurrentThreadIn();
} }
ImageBuffer::ImageBuffer(RefPtr<layers::Image>&& aImage)
: webrtc::NativeHandleBuffer(aImage,
aImage->GetSize().width,
aImage->GetSize().height)
, mImage(std::move(aImage))
{
}
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
ImageBuffer::NativeToI420Buffer()
{
RefPtr<layers::PlanarYCbCrImage> image = mImage->AsPlanarYCbCrImage();
if (!image) {
// TODO. YUV420 ReadBack, Image only provides a RGB readback.
return nullptr;
}
rtc::scoped_refptr<layers::PlanarYCbCrImage> refImage(image);
const layers::PlanarYCbCrData* data = image->GetData();
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buf(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
data->mPicSize.width,
data->mPicSize.height,
data->mYChannel,
data->mYStride,
data->mCbChannel,
data->mCbCrStride,
data->mCrChannel,
data->mCbCrStride,
rtc::KeepRefUntilDone(refImage)));
return buf;
}
} // namespace mozilla } // namespace mozilla

Просмотреть файл

@ -9,6 +9,7 @@
#include "MediaInfo.h" #include "MediaInfo.h"
#include "MediaResult.h" #include "MediaResult.h"
#include "PlatformDecoderModule.h" #include "PlatformDecoderModule.h"
#include "WebrtcImageBuffer.h"
#include "webrtc/common_video/include/video_frame_buffer.h" #include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h" #include "webrtc/modules/video_coding/include/video_codec_interface.h"
@ -25,16 +26,6 @@ class PDMFactory;
class SharedThreadPool; class SharedThreadPool;
class TaskQueue; class TaskQueue;
class ImageBuffer : public webrtc::NativeHandleBuffer
{
public:
explicit ImageBuffer(RefPtr<layers::Image>&& aImage);
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
private:
RefPtr<layers::Image> mImage;
};
class WebrtcMediaDataDecoder : public WebrtcVideoDecoder class WebrtcMediaDataDecoder : public WebrtcVideoDecoder
{ {
public: public:

Просмотреть файл

@ -14,6 +14,7 @@ LOCAL_INCLUDES += [
'/media/webrtc/signaling/src/common', '/media/webrtc/signaling/src/common',
'/media/webrtc/signaling/src/common/browser_logging', '/media/webrtc/signaling/src/common/browser_logging',
'/media/webrtc/signaling/src/common/time_profiling', '/media/webrtc/signaling/src/common/time_profiling',
'/media/webrtc/signaling/src/media-conduit',
'/media/webrtc/signaling/src/peerconnection', '/media/webrtc/signaling/src/peerconnection',
'/media/webrtc/trunk', '/media/webrtc/trunk',
'/media/webrtc/trunk/webrtc', '/media/webrtc/trunk/webrtc',

Просмотреть файл

@ -47,6 +47,7 @@
#include "runnable_utils.h" #include "runnable_utils.h"
#include "signaling/src/peerconnection/MediaTransportHandler.h" #include "signaling/src/peerconnection/MediaTransportHandler.h"
#include "Tracing.h" #include "Tracing.h"
#include "WebrtcImageBuffer.h"
#include "webrtc/base/bind.h" #include "webrtc/base/bind.h"
#include "webrtc/base/keep_ref_until_done.h" #include "webrtc/base/keep_ref_until_done.h"
@ -2040,15 +2041,18 @@ public:
uint32_t aTimeStamp, uint32_t aTimeStamp,
int64_t aRenderTime) int64_t aRenderTime)
{ {
if (aBuffer.native_handle()) { if (aBuffer.type() == webrtc::VideoFrameBuffer::Type::kNative) {
// We assume that only native handles are used with the // We assume that only native handles are used with the
// WebrtcMediaDataDecoderCodec decoder. // WebrtcMediaDataDecoderCodec decoder.
RefPtr<Image> image = static_cast<Image*>(aBuffer.native_handle()); const ImageBuffer *imageBuffer = static_cast<const ImageBuffer*>(&aBuffer);
MutexAutoLock lock(mMutex); MutexAutoLock lock(mMutex);
mImage = image; mImage = imageBuffer->GetNativeImage();
return; return;
} }
MOZ_ASSERT(aBuffer.type() == webrtc::VideoFrameBuffer::Type::kI420);
rtc::scoped_refptr<const webrtc::I420BufferInterface> i420 = aBuffer.GetI420();
MOZ_ASSERT(aBuffer.DataY()); MOZ_ASSERT(aBuffer.DataY());
// Create a video frame using |buffer|. // Create a video frame using |buffer|.
RefPtr<PlanarYCbCrImage> yuvImage = RefPtr<PlanarYCbCrImage> yuvImage =