/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "RemoteVideoDecoder.h"

#include "mozilla/layers/ImageDataSerializer.h"

#ifdef MOZ_AV1
#  include "AOMDecoder.h"
#  include "DAV1DDecoder.h"
#endif
#ifdef XP_WIN
#  include "WMFDecoderModule.h"
#endif
#include "ImageContainer.h"  // for PlanarYCbCrData and BufferRecycleBin
#include "mozilla/layers/VideoBridgeChild.h"
#include "mozilla/layers/ImageClient.h"
#include "PDMFactory.h"
#include "RemoteDecoderManagerChild.h"
#include "RemoteDecoderManagerParent.h"
#include "GPUVideoImage.h"
#include "MediaInfo.h"
#include "mozilla/Telemetry.h"
#include "mozilla/layers/TextureClient.h"

namespace mozilla {

using namespace layers;  // for PlanarYCbCrData and BufferRecycleBin
using namespace ipc;
using namespace gfx;

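// KnowsCompositorVideo adapts the PVideoBridge connection (to either the GPU
// process or the parent process, depending on the TextureFactoryIdentifier)
// so it can act as the KnowsCompositor/TextureForwarder used when allocating
// TextureClients for decoded frames.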
class KnowsCompositorVideo : public layers::KnowsCompositor {
 public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(KnowsCompositorVideo, override)

  VideoBridgeChild* GetVideoBridge() {
    return mTextureFactoryIdentifier.mParentProcessType == GeckoProcessType_GPU
               ? VideoBridgeChild::GetSingletonToGPUProcess()
               : VideoBridgeChild::GetSingletonToParentProcess();
  }

  layers::TextureForwarder* GetTextureForwarder() override {
    return GetVideoBridge();
  }
  layers::LayersIPCActor* GetLayersIPCActor() override {
    return GetTextureForwarder();
  }

  static already_AddRefed<KnowsCompositorVideo> TryCreateForIdentifier(
      const layers::TextureFactoryIdentifier& aIdentifier) {
    VideoBridgeChild* child =
        (aIdentifier.mParentProcessType == GeckoProcessType_GPU)
            ? VideoBridgeChild::GetSingletonToGPUProcess()
            : VideoBridgeChild::GetSingletonToParentProcess();
    if (!child) {
      return nullptr;
    }

    // The RDD process will never use hardware decoding since it's
    // sandboxed, so don't bother trying to create a sync object.
    TextureFactoryIdentifier ident = aIdentifier;
    if (XRE_IsRDDProcess()) {
      ident.mSyncHandle = 0;
    }

    RefPtr<KnowsCompositorVideo> knowsCompositor = new KnowsCompositorVideo();
    knowsCompositor->IdentifyTextureHost(ident);
    return knowsCompositor.forget();
  }

 private:
  KnowsCompositorVideo() = default;
  virtual ~KnowsCompositorVideo() = default;
};

RemoteVideoDecoderChild::RemoteVideoDecoderChild(bool aRecreatedOnCrash)
    : RemoteDecoderChild(aRecreatedOnCrash),
      mBufferRecycleBin(new BufferRecycleBin) {}

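// Rebuilds a planar YCbCr image from a SurfaceDescriptorBuffer received over
// IPC. The pixel data is copied into a RecyclingPlanarYCbCrImage, and the
// backing memory (heap buffer or Shmem) is released before returning.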
RefPtr<mozilla::layers::Image> RemoteVideoDecoderChild::DeserializeImage(
    const SurfaceDescriptorBuffer& aSdBuffer, const IntSize& aPicSize) {
  MOZ_ASSERT(aSdBuffer.desc().type() == BufferDescriptor::TYCbCrDescriptor);
  if (aSdBuffer.desc().type() != BufferDescriptor::TYCbCrDescriptor) {
    return nullptr;
  }
  const YCbCrDescriptor& descriptor = aSdBuffer.desc().get_YCbCrDescriptor();

  uint8_t* buffer = nullptr;
  const MemoryOrShmem& memOrShmem = aSdBuffer.data();
  switch (memOrShmem.type()) {
    case MemoryOrShmem::Tuintptr_t:
      buffer = reinterpret_cast<uint8_t*>(memOrShmem.get_uintptr_t());
      break;
    case MemoryOrShmem::TShmem:
      buffer = memOrShmem.get_Shmem().get<uint8_t>();
      break;
    default:
      MOZ_ASSERT(false, "Unknown MemoryOrShmem type");
  }
  if (!buffer) {
    return nullptr;
  }

  PlanarYCbCrData pData;
  pData.mYSize = descriptor.ySize();
  pData.mYStride = descriptor.yStride();
  pData.mCbCrSize = descriptor.cbCrSize();
  pData.mCbCrStride = descriptor.cbCrStride();
  // default mYSkip, mCbSkip, mCrSkip because not held in YCbCrDescriptor
  pData.mYSkip = pData.mCbSkip = pData.mCrSkip = 0;
  // default mPicX, mPicY because not held in YCbCrDescriptor
  pData.mPicX = pData.mPicY = 0;
  pData.mPicSize = aPicSize;
  pData.mStereoMode = descriptor.stereoMode();
  pData.mColorDepth = descriptor.colorDepth();
  pData.mYUVColorSpace = descriptor.yUVColorSpace();
  pData.mYChannel = ImageDataSerializer::GetYChannel(buffer, descriptor);
  pData.mCbChannel = ImageDataSerializer::GetCbChannel(buffer, descriptor);
  pData.mCrChannel = ImageDataSerializer::GetCrChannel(buffer, descriptor);

  // images coming from AOMDecoder are RecyclingPlanarYCbCrImages.
  RefPtr<RecyclingPlanarYCbCrImage> image =
      new RecyclingPlanarYCbCrImage(mBufferRecycleBin);
  bool setData = image->CopyData(pData);
  MOZ_ASSERT(setData);

  switch (memOrShmem.type()) {
    case MemoryOrShmem::Tuintptr_t:
      delete[] reinterpret_cast<uint8_t*>(memOrShmem.get_uintptr_t());
      break;
    case MemoryOrShmem::TShmem:
      DeallocShmem(memOrShmem.get_Shmem());
      break;
    default:
      MOZ_ASSERT(false, "Unknown MemoryOrShmem type");
  }

  if (!setData) {
    return nullptr;
  }

  return image;
}

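// Runs on the manager thread. Converts each RemoteVideoDataIPDL received from
// the parent actor into a VideoData, either by deserializing a buffer
// descriptor or by wrapping the SurfaceDescriptor in a GPUVideoImage.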
mozilla::ipc::IPCResult RemoteVideoDecoderChild::RecvOutput(
    const DecodedOutputIPDL& aDecodedData) {
  AssertOnManagerThread();
  MOZ_ASSERT(aDecodedData.type() == DecodedOutputIPDL::TRemoteVideoDataIPDL);
  const RemoteVideoDataIPDL& aData = aDecodedData.get_RemoteVideoDataIPDL();

  if (aData.sd().type() == SurfaceDescriptor::TSurfaceDescriptorBuffer) {
    RefPtr<Image> image = DeserializeImage(
        aData.sd().get_SurfaceDescriptorBuffer(), aData.frameSize());

    RefPtr<VideoData> video = VideoData::CreateFromImage(
        aData.display(), aData.base().offset(), aData.base().time(),
        aData.base().duration(), image, aData.base().keyframe(),
        aData.base().timecode());

    mDecodedData.AppendElement(std::move(video));
  } else {
    // The Image here creates a TextureData object that takes ownership
    // of the SurfaceDescriptor, and is responsible for making sure that
    // it gets deallocated.
    RefPtr<Image> image =
        new GPUVideoImage(GetManager(), aData.sd(), aData.frameSize());

    RefPtr<VideoData> video = VideoData::CreateFromImage(
        aData.display(), aData.base().offset(), aData.base().time(),
        aData.base().duration(), image, aData.base().keyframe(),
        aData.base().timecode());

    mDecodedData.AppendElement(std::move(video));
  }
  return IPC_OK();
}

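// Binds this child actor to the RDD-process RemoteDecoderManagerChild and
// constructs the matching PRemoteDecoder parent. Failures are returned as a
// fatal MediaResult so callers can fall back to other PDMs.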
MediaResult RemoteVideoDecoderChild::InitIPDL(
    const VideoInfo& aVideoInfo, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions,
    const layers::TextureFactoryIdentifier* aIdentifier) {
  RefPtr<RemoteDecoderManagerChild> manager =
      RemoteDecoderManagerChild::GetRDDProcessSingleton();

  // The manager isn't available because RemoteDecoderManagerChild has been
  // initialized with null endpoints and we no longer want to decode video in
  // the RDD process. Return an error here so that we can fall back to other
  // PDMs.
  if (!manager) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager is not available."));
  }

  if (!manager->CanSend()) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager unable to send."));
  }

  mIPDLSelfRef = this;
  bool success = false;
  nsCString errorDescription;
  nsCString blacklistedD3D11Driver;
  nsCString blacklistedD3D9Driver;
  VideoDecoderInfoIPDL decoderInfo(aVideoInfo, aFramerate);
  if (manager->SendPRemoteDecoderConstructor(
          this, decoderInfo, aOptions, ToMaybe(aIdentifier), &success,
          &blacklistedD3D11Driver, &blacklistedD3D9Driver,
          &errorDescription)) {
    mCanSend = true;
  }

  return success ? MediaResult(NS_OK)
                 : MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, errorDescription);
}

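// Windows-only helper: records, keyed by the blacklisted DXVA driver DLL name
// (D3D11 or D3D9), whether the GPU process crashed while that driver was no
// longer blacklisted.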
#ifdef XP_WIN
static void ReportUnblacklistingTelemetry(
    bool isGPUProcessCrashed, const nsCString& aD3D11BlacklistedDriver,
    const nsCString& aD3D9BlacklistedDriver) {
  const nsCString& blacklistedDLL = !aD3D11BlacklistedDriver.IsEmpty()
                                        ? aD3D11BlacklistedDriver
                                        : aD3D9BlacklistedDriver;

  if (!blacklistedDLL.IsEmpty()) {
    Telemetry::Accumulate(
        Telemetry::VIDEO_UNBLACKINGLISTING_DXVA_DRIVER_RUNTIME_STATUS,
        blacklistedDLL, isGPUProcessCrashed ? 1 : 0);
  }
}
#endif  // XP_WIN

GpuRemoteVideoDecoderChild::GpuRemoteVideoDecoderChild()
    : RemoteVideoDecoderChild(true) {}

void GpuRemoteVideoDecoderChild::RecordShutdownTelemetry(
    bool aAbnormalShutdown) {
#ifdef XP_WIN
  ReportUnblacklistingTelemetry(aAbnormalShutdown, mBlacklistedD3D11Driver,
                                mBlacklistedD3D9Driver);
#endif  // XP_WIN
}

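// Same as RemoteVideoDecoderChild::InitIPDL, but binds to the GPU-process
// manager and always passes a TextureFactoryIdentifier. When the manager
// cannot send yet (e.g. right after a GPU process crash), this returns NS_OK
// without setting mIPDLSelfRef so Init() will reject and the caller can retry.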
MediaResult GpuRemoteVideoDecoderChild::InitIPDL(
    const VideoInfo& aVideoInfo, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions,
    const layers::TextureFactoryIdentifier& aIdentifier) {
  RefPtr<RemoteDecoderManagerChild> manager =
      RemoteDecoderManagerChild::GetGPUProcessSingleton();

  // The manager isn't available because RemoteDecoderManagerChild has been
  // initialized with null endpoints and we no longer want to decode video in
  // the GPU process. Return an error here so that we can fall back to other
  // PDMs.
  if (!manager) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("RemoteDecoderManager is not available."));
  }

  // The manager doesn't support sending messages because we've just crashed
  // and are working on reinitialization. Don't initialize mIPDLSelfRef and
  // leave us in an error state. We'll then immediately reject the promise when
  // Init() is called and the caller can try again. Hopefully by then the new
  // manager is ready, or we've notified the caller of it being no longer
  // available. If not, then the cycle repeats until we're ready.
  if (!manager->CanSend()) {
    return NS_OK;
  }

  mIPDLSelfRef = this;
  bool success = false;
  nsCString errorDescription;
  VideoDecoderInfoIPDL decoderInfo(aVideoInfo, aFramerate);
  if (manager->SendPRemoteDecoderConstructor(
          this, decoderInfo, aOptions, Some(aIdentifier), &success,
          &mBlacklistedD3D11Driver, &mBlacklistedD3D9Driver,
          &errorDescription)) {
    mCanSend = true;
  }

  return success ? MediaResult(NS_OK)
                 : MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, errorDescription);
}

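// Runs in the RDD or GPU process: selects and creates the underlying
// MediaDataDecoder (the WMF PDM in the GPU process on Windows, AOM or dav1d
// for AV1) from the VideoInfo, framerate and options sent by the child.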
RemoteVideoDecoderParent::RemoteVideoDecoderParent(
    RemoteDecoderManagerParent* aParent, const VideoInfo& aVideoInfo,
    float aFramerate, const CreateDecoderParams::OptionSet& aOptions,
    const Maybe<layers::TextureFactoryIdentifier>& aIdentifier,
    TaskQueue* aManagerTaskQueue, TaskQueue* aDecodeTaskQueue, bool* aSuccess,
    nsCString* aErrorDescription)
    : RemoteDecoderParent(aParent, aManagerTaskQueue, aDecodeTaskQueue),
      mVideoInfo(aVideoInfo) {
  if (aIdentifier) {
    // Check to see if we have a direct PVideoBridge connection to the
    // destination process specified in aIdentifier, and create a
    // KnowsCompositor representing that connection if so. If this fails, then
    // we fall back to returning the decoded frames directly via Output().
    mKnowsCompositor =
        KnowsCompositorVideo::TryCreateForIdentifier(*aIdentifier);
  }

  CreateDecoderParams params(mVideoInfo);
  params.mTaskQueue = mDecodeTaskQueue;
  params.mKnowsCompositor = mKnowsCompositor;
  params.mImageContainer = new layers::ImageContainer();
  params.mRate = CreateDecoderParams::VideoFrameRate(aFramerate);
  params.mOptions = aOptions;
  MediaResult error(NS_OK);
  params.mError = &error;

  if (XRE_IsGPUProcess()) {
#ifdef XP_WIN
    // Ensure everything is properly initialized on the right thread.
    PDMFactory::EnsureInit();

    // TODO: Ideally we wouldn't hardcode the WMF PDM, and we'd use the normal
    // PDM factory logic for picking a decoder.
    RefPtr<WMFDecoderModule> pdm(new WMFDecoderModule());
    pdm->Startup();
    mDecoder = pdm->CreateVideoDecoder(params);
#else
    MOZ_ASSERT(false,
               "Can't use RemoteVideoDecoder in the GPU process on non-Windows "
               "platforms yet");
#endif
  }

#ifdef MOZ_AV1
  if (AOMDecoder::IsAV1(params.mConfig.mMimeType)) {
    if (StaticPrefs::media_av1_use_dav1d()) {
      mDecoder = new DAV1DDecoder(params);
    } else {
      mDecoder = new AOMDecoder(params);
    }
  }
#endif

  if (NS_FAILED(error)) {
    MOZ_ASSERT(aErrorDescription);
    *aErrorDescription = error.Description();
  }

  *aSuccess = !!mDecoder;
}

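// For each decoded frame, either forwards the underlying texture over the
// video bridge (when a KnowsCompositor is available) or copies the YCbCr data
// into a Shmem-backed SurfaceDescriptorBuffer, then sends the resulting
// RemoteVideoDataIPDL back to the child.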
MediaResult RemoteVideoDecoderParent::ProcessDecodedData(
    const MediaDataDecoder::DecodedData& aData) {
  MOZ_ASSERT(OnManagerThread());

  // If the video decoder bridge has shut down, stop.
  if (mKnowsCompositor && !mKnowsCompositor->GetTextureForwarder()) {
    return NS_OK;
  }

  for (const auto& data : aData) {
    MOZ_ASSERT(data->mType == MediaData::Type::VIDEO_DATA,
               "Can only decode videos using RemoteDecoderParent!");
    VideoData* video = static_cast<VideoData*>(data.get());

    MOZ_ASSERT(video->mImage,
               "Decoded video must output a layer::Image to "
               "be used with RemoteDecoderParent");

    SurfaceDescriptor sd;
    IntSize size;

    if (mKnowsCompositor) {
      RefPtr<TextureClient> texture =
          video->mImage->GetTextureClient(mKnowsCompositor);

      if (!texture) {
        texture = ImageClient::CreateTextureClientForImage(video->mImage,
                                                           mKnowsCompositor);
      }

      if (texture && !texture->IsAddedToCompositableClient()) {
        texture->InitIPDLActor(mKnowsCompositor);
        texture->SetAddedToCompositableClient();
      }
      if (texture) {
        sd = mParent->StoreImage(video->mImage, texture);
        size = texture->GetSize();
      }
    } else {
      PlanarYCbCrImage* image =
          static_cast<PlanarYCbCrImage*>(video->mImage.get());

      SurfaceDescriptorBuffer sdBuffer;
      Shmem buffer;
      if (!AllocShmem(image->GetDataSize(), Shmem::SharedMemory::TYPE_BASIC,
                      &buffer)) {
        return MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "AllocShmem failed in "
                           "RemoteVideoDecoderParent::ProcessDecodedData");
      }
      if (image->GetDataSize() > buffer.Size<uint8_t>()) {
        return MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "AllocShmem returned less than requested in "
                           "RemoteVideoDecoderParent::ProcessDecodedData");
      }

      sdBuffer.data() = std::move(buffer);
      image->BuildSurfaceDescriptorBuffer(sdBuffer);

      sd = sdBuffer;
      size = image->GetSize();
    }

    RemoteVideoDataIPDL output(
        MediaDataIPDL(data->mOffset, data->mTime, data->mTimecode,
                      data->mDuration, data->mKeyframe),
        video->mDisplay, size, sd, video->mFrameID);
    if (mKnowsCompositor) {
      RefPtr<RemoteVideoDecoderParent> parent = this;
      mKnowsCompositor->GetVideoBridge()->SendFlush()->Then(
          GetCurrentThreadSerialEventTarget(), __func__,
          [parent, output](bool) { Unused << parent->SendOutput(output); },
          [parent](ResponseRejectReason&& aReason) {
            parent->Error(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER);
          });
    } else {
      Unused << SendOutput(output);
    }
  }

  return NS_OK;
}

}  // namespace mozilla