Bug 1355048: P8. Implement WebrtcMediaDataDecoderCodec. r=jesup

This provides the ability to use the PlatformDecoderModule interface, including hardware accelerated ones.

Code is disabled by default via the media.navigator.mediadatadecoder_enabled preference.

MozReview-Commit-ID: 7bWJXEK8CoO

--HG--
extra : rebase_source : df3801c02d3ea6e4c120a4836c4893e18e37d694
This commit is contained in:
Jean-Yves Avenard 2017-07-01 01:51:00 +02:00
Родитель 3db14f695e
Коммит c46ffaa380
5 изменённых файлов: 290 добавлений и 23 удалений

Просмотреть файл

@ -3,6 +3,8 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "MediaDataDecoderCodec.h"
#include "MediaPrefs.h"
#include "WebrtcMediaDataDecoderCodec.h"
namespace mozilla {
@ -15,9 +17,21 @@ MediaDataDecoderCodec::CreateEncoder(
// Factory for the PDM-backed webrtc video decoder.
// Returns nullptr (caller falls back to the built-in decoders) when the
// media.navigator.mediadatadecoder_enabled pref is off or the codec is not
// one we route through the PlatformDecoderModule path.
/* static */ WebrtcVideoDecoder*
MediaDataDecoderCodec::CreateDecoder(
  webrtc::VideoCodecType aCodecType)
{
  // Feature is opt-in via preference; disabled by default.
  if (!MediaPrefs::MediaDataDecoderEnabled()) {
    return nullptr;
  }
  switch (aCodecType) {
    case webrtc::VideoCodecType::kVideoCodecVP8:
    case webrtc::VideoCodecType::kVideoCodecVP9:
    case webrtc::VideoCodecType::kVideoCodecH264:
      break;
    default:
      // Unsupported codec type for the platform decoder path.
      return nullptr;
  }
  return new WebrtcMediaDataDecoder();
}
} // namespace mozilla

Просмотреть файл

@ -7,10 +7,11 @@
#include "MediaConduitInterface.h"
#include "webrtc/common_types.h"
#include "webrtc/video_decoder.h"
namespace mozilla {
class WebrtcVideoDecoder;
class WebrtcVideoEncoder;
class MediaDataDecoderCodec
{
public:

Просмотреть файл

@ -3,49 +3,253 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "WebrtcMediaDataDecoderCodec.h"
#include "PlatformDecoderModule.h"
#include "ImageContainer.h"
#include "Layers.h"
#include "PDMFactory.h"
#include "VideoUtils.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "webrtc/base/keep_ref_until_done.h"
namespace mozilla {
class MediaDataDecoder;
// Glue between the webrtc::VideoDecoder interface and Gecko's
// PlatformDecoderModule machinery.
// - mTaskQueue: dedicated queue on the platform-decoder thread pool where
//   all decoder promises are resolved.
// - mImageContainer: asynchronous image container decoded frames are
//   allocated from.
// - mFactory: PDMFactory used later (InitDecode) to instantiate the actual
//   platform decoder.
// - mMonitor: lets the synchronous webrtc entry points block on the
//   asynchronous MozPromise callbacks.
WebrtcMediaDataDecoder::WebrtcMediaDataDecoder()
: mTaskQueue(
new TaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER),
"WebrtcMediaDataDecoder::mTaskQueue"))
, mImageContainer(layers::LayerManager::CreateImageContainer(
layers::ImageContainer::ASYNCHRONOUS))
, mFactory(new PDMFactory())
, mMonitor("WebrtcMediaDataDecoder")
{
}
// Blocks until the task queue has fully drained and shut down, so no
// decoder callback can run against a destroyed object.
WebrtcMediaDataDecoder::~WebrtcMediaDataDecoder()
{
mTaskQueue->BeginShutdown();
mTaskQueue->AwaitShutdownAndIdle();
}
// Implements webrtc::VideoDecoder::InitDecode.
// Maps the negotiated webrtc codec type onto a MIME type, creates a
// platform decoder through the PDMFactory, and synchronously waits for its
// asynchronous Init() promise to settle before returning.
// Returns WEBRTC_VIDEO_CODEC_OK on success, WEBRTC_VIDEO_CODEC_ERROR when
// the codec is unsupported or decoder creation/initialization fails.
int32_t
WebrtcMediaDataDecoder::InitDecode(const webrtc::VideoCodec* aCodecSettings,
                                   int32_t aNumberOfCores)
{
  nsCString codec;
  switch (aCodecSettings->codecType) {
    case webrtc::VideoCodecType::kVideoCodecVP8:
      codec = "video/webm; codecs=vp8";
      break;
    case webrtc::VideoCodecType::kVideoCodecVP9:
      codec = "video/webm; codecs=vp9";
      break;
    case webrtc::VideoCodecType::kVideoCodecH264:
      codec = "video/avc";
      break;
    default:
      return WEBRTC_VIDEO_CODEC_ERROR;
  }

  mTrackType = TrackInfo::kVideoTrack;
  mInfo = VideoInfo(aCodecSettings->width, aCodecSettings->height);
  mInfo.mMimeType = codec;

  RefPtr<layers::KnowsCompositor> knowsCompositor =
    layers::ImageBridgeChild::GetSingleton();
  // LowLatency: rtc decoding wants frames out as soon as possible.
  mDecoder = mFactory->CreateDecoder(
    { mInfo,
      mTaskQueue,
      CreateDecoderParams::OptionSet(CreateDecoderParams::Option::LowLatency),
      mTrackType,
      mImageContainer,
      knowsCompositor });

  if (!mDecoder) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Block this (webrtc) thread until Init() resolves or rejects on the
  // task queue; the lambdas record the outcome and wake us via the monitor.
  MonitorAutoLock lock(mMonitor);
  bool done = false;
  mDecoder->Init()->Then(mTaskQueue,
                         __func__,
                         [&](TrackInfo::TrackType) {
                           MonitorAutoLock lock(mMonitor);
                           done = true;
                           mMonitor.Notify();
                         },
                         [&](const MediaResult& aError) {
                           MonitorAutoLock lock(mMonitor);
                           done = true;
                           mError = aError;
                           mMonitor.Notify();
                         });
  while (!done) {
    mMonitor.Wait();
  }

  return NS_SUCCEEDED(mError) ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
}
// Implements webrtc::VideoDecoder::Decode.
// Wraps one compressed frame in a MediaRawData, feeds it to the platform
// decoder, blocks until the decode promise settles, then forwards every
// decoded image to mCallback wrapped as an ImageBuffer (zero-copy native
// handle). Refuses frames until a complete key frame has been seen.
int32_t
WebrtcMediaDataDecoder::Decode(
  const webrtc::EncodedImage& aInputImage,
  bool aMissingFrames,
  const webrtc::RTPFragmentationHeader* aFragmentation,
  const webrtc::CodecSpecificInfo* aCodecSpecificInfo,
  int64_t aRenderTimeMs)
{
  if (!mCallback || !mDecoder) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (!aInputImage._buffer || !aInputImage._length) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  // Always start with a complete key frame.
  if (mNeedKeyframe) {
    if (aInputImage._frameType != webrtc::FrameType::kVideoFrameKey)
      return WEBRTC_VIDEO_CODEC_ERROR;
    // We have a key frame - is it complete?
    if (aInputImage._completeFrame) {
      mNeedKeyframe = false;
    } else {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  // MediaRawData copies the input; Data() is null if allocation failed.
  RefPtr<MediaRawData> compressedFrame =
    new MediaRawData(aInputImage._buffer, aInputImage._length);
  if (!compressedFrame->Data()) {
    return WEBRTC_VIDEO_CODEC_MEMORY;
  }

  compressedFrame->mTime =
    media::TimeUnit::FromMicroseconds(aInputImage._timeStamp);
  compressedFrame->mTimecode =
    media::TimeUnit::FromMicroseconds(aRenderTimeMs * 1000);
  compressedFrame->mKeyframe =
    aInputImage._frameType == webrtc::FrameType::kVideoFrameKey;

  {
    // Synchronously wait for the decode promise; results (or mError) are
    // recorded under the monitor by the Then() lambdas.
    MonitorAutoLock lock(mMonitor);
    bool done = false;
    mDecoder->Decode(compressedFrame)->Then(
      mTaskQueue,
      __func__,
      [&](const MediaDataDecoder::DecodedData& aResults) {
        MonitorAutoLock lock(mMonitor);
        mResults = aResults;
        done = true;
        mMonitor.Notify();
      },
      [&](const MediaResult& aError) {
        MonitorAutoLock lock(mMonitor);
        mError = aError;
        done = true;
        mMonitor.Notify();
      });
    while (!done) {
      mMonitor.Wait();
    }

    for (auto& frame : mResults) {
      MOZ_ASSERT(frame->mType == MediaData::VIDEO_DATA);
      RefPtr<VideoData> video = frame->As<VideoData>();
      MOZ_ASSERT(video);
      if (!video->mImage) {
        // Nothing to display.
        continue;
      }
      // Hand the layers::Image to webrtc without copying pixel data.
      rtc::scoped_refptr<ImageBuffer> image(
        new rtc::RefCountedObject<ImageBuffer>(Move(video->mImage)));

      webrtc::VideoFrame videoFrame(image,
                                    frame->mTime.ToMicroseconds(),
                                    frame->mDuration.ToMicroseconds() * 1000,
                                    aInputImage.rotation_);
      mCallback->Decoded(videoFrame);
    }
    mResults.Clear();
  }

  return NS_SUCCEEDED(mError) ? WEBRTC_VIDEO_CODEC_OK
                              : WEBRTC_VIDEO_CODEC_ERROR;
}
// Implements webrtc::VideoDecoder::RegisterDecodeCompleteCallback.
// Stores the (non-owned) sink that Decode() delivers frames to. Must be
// called before Decode(), which otherwise reports UNINITIALIZED.
int32_t
WebrtcMediaDataDecoder::RegisterDecodeCompleteCallback(
  webrtc::DecodedImageCallback* aCallback)
{
  mCallback = aCallback;
  return WEBRTC_VIDEO_CODEC_OK;
}
// Implements webrtc::VideoDecoder::Release.
// Flushes then shuts down the platform decoder, blocking until the chained
// shutdown promise resolves, and re-arms the key-frame requirement so a
// subsequent InitDecode/Decode cycle starts clean.
int32_t
WebrtcMediaDataDecoder::Release()
{
  MonitorAutoLock lock(mMonitor);
  bool done = false;
  // Flush first (resolve or reject), then shut down in either case; the
  // shutdown promise is documented to always resolve.
  mDecoder->Flush()
    ->Then(mTaskQueue,
           __func__,
           [this]() { return mDecoder->Shutdown(); },
           [this](const MediaResult& aError) { return mDecoder->Shutdown(); })
    ->Then(mTaskQueue,
           __func__,
           [&]() {
             MonitorAutoLock lock(mMonitor);
             done = true;
             mMonitor.Notify();
           },
           []() { MOZ_ASSERT_UNREACHABLE("Shutdown promise always resolved"); });
  while (!done) {
    mMonitor.Wait();
  }
  mDecoder = nullptr;
  mNeedKeyframe = true;
  return WEBRTC_VIDEO_CODEC_OK;
}
bool
WebrtcMediaDataDecoder::OnTaskQueue() const
{
return OwnerThread()->IsCurrentThreadIn();
}
// Wraps a layers::Image as a webrtc native-handle buffer, taking ownership
// of the image. The base class is initialized before the members, so aImage
// is still valid when NativeHandleBuffer reads its size; only afterwards is
// it moved into mImage.
ImageBuffer::ImageBuffer(RefPtr<layers::Image>&& aImage)
: webrtc::NativeHandleBuffer(aImage,
aImage->GetSize().width,
aImage->GetSize().height)
, mImage(Move(aImage))
{
}
// Exposes the wrapped image as an I420 buffer without copying pixels: the
// returned WrappedI420Buffer aliases the Y/Cb/Cr planes of the underlying
// PlanarYCbCrImage, and rtc::KeepRefUntilDone keeps the image alive for the
// buffer's lifetime. Returns nullptr when the image has no planar YCbCr
// representation.
rtc::scoped_refptr<webrtc::VideoFrameBuffer>
ImageBuffer::NativeToI420Buffer()
{
RefPtr<layers::PlanarYCbCrImage> image = mImage->AsPlanarYCbCrImage();
if (!image) {
// TODO. YUV420 ReadBack, Image only provides a RGB readback.
return nullptr;
}
// Extra reference handed to KeepRefUntilDone below.
rtc::scoped_refptr<layers::PlanarYCbCrImage> refImage(image);
const layers::PlanarYCbCrData* data = image->GetData();
// Cb and Cr share a stride (mCbCrStride) in PlanarYCbCrData.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buf(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
data->mPicSize.width,
data->mPicSize.height,
data->mYChannel,
data->mYStride,
data->mCbChannel,
data->mCbCrStride,
data->mCrChannel,
data->mCbCrStride,
rtc::KeepRefUntilDone(refImage)));
return buf;
}
} // namespace mozilla

Просмотреть файл

@ -6,21 +6,39 @@
#define WebrtcMediaDataDecoderCodec_h__
#include "MediaConduitInterface.h"
#include "mozilla/RefPtr.h"
#include "MediaInfo.h"
#include "MediaResult.h"
#include "PlatformDecoderModule.h"
#include "webrtc/common_video/include/video_frame_buffer.h"
#include "webrtc/modules/video_coding/include/video_codec_interface.h"
namespace webrtc {
class DecodedImageCallback;
}
namespace mozilla {
namespace layers {
class Image;
class ImageContainer;
}
class MediaDataDecoder;
class PDMFactory;
class TaskQueue;
// Wraps a layers::Image as a webrtc::NativeHandleBuffer so decoded frames
// can be handed to webrtc without an upfront pixel copy.
class ImageBuffer : public webrtc::NativeHandleBuffer
{
public:
// Takes ownership of aImage.
explicit ImageBuffer(RefPtr<layers::Image>&& aImage);
// Zero-copy view of the image as I420; nullptr if the image is not
// planar YCbCr.
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
private:
RefPtr<layers::Image> mImage;
};
class WebrtcMediaDataDecoder : public WebrtcVideoDecoder
{
public:
WebrtcMediaDataDecoder();
virtual ~WebrtcMediaDataDecoder();
// Implement VideoDecoder interface.
uint64_t PluginID() const override { return 0; }
@ -37,6 +55,27 @@ public:
webrtc::DecodedImageCallback* callback) override;
int32_t Release() override;
private:
~WebrtcMediaDataDecoder();
void QueueFrame(MediaRawData* aFrame);
AbstractThread* OwnerThread() const { return mTaskQueue; }
bool OnTaskQueue() const;
const RefPtr<TaskQueue> mTaskQueue;
const RefPtr<layers::ImageContainer> mImageContainer;
const RefPtr<PDMFactory> mFactory;
RefPtr<MediaDataDecoder> mDecoder;
webrtc::DecodedImageCallback* mCallback = nullptr;
VideoInfo mInfo;
TrackInfo::TrackType mTrackType;
bool mNeedKeyframe = true;
MozPromiseRequestHolder<MediaDataDecoder::DecodePromise> mDecodeRequest;
Monitor mMonitor;
// Members below are accessed via mMonitor
MediaResult mError = NS_OK;
MediaDataDecoder::DecodedData mResults;
};
} // namespace mozilla

Просмотреть файл

@ -2198,6 +2198,15 @@ public:
uint32_t time_stamp,
int64_t render_time)
{
if (buffer.native_handle()) {
// We assume that only native handles are used with the
// WebrtcMediaDataDecoderCodec decoder.
RefPtr<Image> image = static_cast<Image*>(buffer.native_handle());
MutexAutoLock lock(mutex_);
image_ = image;
return;
}
MOZ_ASSERT(buffer.DataY());
// Create a video frame using |buffer|.
RefPtr<PlanarYCbCrImage> yuvImage =