Backed out 27 changesets (bug 1831451) for causing hazard failures on VideoDecoder.cpp. CLOSED TREE

Backed out changeset 4fb457340ffe (bug 1831451)
Backed out changeset 91c2c0f7e9ab (bug 1831451)
Backed out changeset 200c174fc2d1 (bug 1831451)
Backed out changeset cd30b650aeed (bug 1831451)
Backed out changeset 6892b88cb4ee (bug 1831451)
Backed out changeset ecb08baa7f0e (bug 1831451)
Backed out changeset f1b3b6d3186b (bug 1831451)
Backed out changeset 81c7ebd2aeb3 (bug 1831451)
Backed out changeset e2272bf547e1 (bug 1831451)
Backed out changeset 342a32c598fe (bug 1831451)
Backed out changeset f19cfef38d63 (bug 1831451)
Backed out changeset 1cf50ce36a73 (bug 1831451)
Backed out changeset d7f55df2c2e4 (bug 1831451)
Backed out changeset 61be9a6b9cd4 (bug 1831451)
Backed out changeset 89cf8d2548a8 (bug 1831451)
Backed out changeset 00bf4fed8bcb (bug 1831451)
Backed out changeset c8fdb24dd953 (bug 1831451)
Backed out changeset 06f1008127bb (bug 1831451)
Backed out changeset 68fae9bd7d92 (bug 1831451)
Backed out changeset 0fca9e6c2ef6 (bug 1831451)
Backed out changeset 05ae55dd0cec (bug 1831451)
Backed out changeset 8a3af1118791 (bug 1831451)
Backed out changeset 94ad056419ea (bug 1831451)
Backed out changeset f615134b7349 (bug 1831451)
Backed out changeset 4e7051264cb3 (bug 1831451)
Backed out changeset 8f3c10480da7 (bug 1831451)
Backed out changeset eb17a0f3efdf (bug 1831451)
This commit is contained in:
Cosmin Sabou 2023-06-29 13:41:34 +03:00
Родитель 7ebedfced8
Коммит b6c0d15922
21 изменённых файлов: 513 добавлений и 3656 удалений

Просмотреть файл

@ -27,7 +27,7 @@ BitReader::~BitReader() = default;
uint32_t BitReader::ReadBits(size_t aNum) {
MOZ_ASSERT(aNum <= 32);
if (mTotalBitsLeft < aNum) {
NS_WARNING("Reading past end of buffer");
NS_ASSERTION(false, "Reading past end of buffer");
return 0;
}
uint32_t result = 0;

Просмотреть файл

@ -1,491 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "DecoderAgent.h"
#include <atomic>
#include "ImageContainer.h"
#include "MediaDataDecoderProxy.h"
#include "PDMFactory.h"
#include "VideoUtils.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/Logging.h"
#include "mozilla/layers/ImageBridgeChild.h"
#include "nsThreadUtils.h"
extern mozilla::LazyLogModule gWebCodecsLog;
namespace mozilla {
#ifdef LOG_INTERNAL
# undef LOG_INTERNAL
#endif // LOG_INTERNAL
#define LOG_INTERNAL(level, msg, ...) \
MOZ_LOG(gWebCodecsLog, LogLevel::level, (msg, ##__VA_ARGS__))
#ifdef LOG
# undef LOG
#endif // LOG
#define LOG(msg, ...) LOG_INTERNAL(Debug, msg, ##__VA_ARGS__)
#ifdef LOGW
# undef LOGW
#endif // LOGW
#define LOGW(msg, ...) LOG_INTERNAL(Warning, msg, ##__VA_ARGS__)
#ifdef LOGE
# undef LOGE
#endif // LOGE
#define LOGE(msg, ...) LOG_INTERNAL(Error, msg, ##__VA_ARGS__)
#ifdef LOGV
# undef LOGV
#endif // LOGV
#define LOGV(msg, ...) LOG_INTERNAL(Verbose, msg, ##__VA_ARGS__)
// Constructs an agent for decoding the track described by `aInfo` (must be
// non-null). The calling thread is captured as the owner thread; all later
// API calls must happen on that same thread.
DecoderAgent::DecoderAgent(Id aId, UniquePtr<TrackInfo>&& aInfo)
    : mId(aId),
      mInfo(std::move(aInfo)),
      mOwnerThread(GetCurrentSerialEventTarget()),
      mPDMFactory(MakeRefPtr<PDMFactory>()),
      mImageContainer(MakeAndAddRef<layers::ImageContainer>(
          layers::ImageContainer::ASYNCHRONOUS)),
      mDecoder(nullptr),
      mState(State::Unconfigured) {
  MOZ_ASSERT(mInfo);
  MOZ_ASSERT(mOwnerThread);
  MOZ_ASSERT(mPDMFactory);
  MOZ_ASSERT(mImageContainer);

  LOG("DecoderAgent #%d (%p) ctor", mId, this);
}
// The agent must be back in the Unconfigured state before destruction, i.e.
// Shutdown() has completed or Configure() was never called.
DecoderAgent::~DecoderAgent() {
  LOG("DecoderAgent #%d (%p) dtor", mId, this);
  MOZ_ASSERT(mState == State::Unconfigured, "decoder release in wrong state");
  MOZ_ASSERT(!mDecoder, "decoder must be shutdown");
}
// Creates and initializes the underlying MediaDataDecoder. Owner thread only.
// The returned promise resolves once the decoder has been initialized and
// rejects if creation or initialization fails, or if the agent is already in
// the (final) Error state. Must not be called while a previous Configure()
// is still in flight.
RefPtr<DecoderAgent::ConfigurePromise> DecoderAgent::Configure(
    bool aPreferSoftwareDecoder, bool aLowLatency) {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());
  MOZ_ASSERT(mState == State::Unconfigured || mState == State::Error);
  MOZ_ASSERT(mConfigurePromise.IsEmpty());
  MOZ_ASSERT(!mCreateRequest.Exists());
  MOZ_ASSERT(!mInitRequest.Exists());

  // Errors are final: callers must create a new DecoderAgent instead.
  if (mState == State::Error) {
    LOGE("DecoderAgent #%d (%p) tried to configure in error state", mId, this);
    return ConfigurePromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    "Cannot configure in error state"),
        __func__);
  }

  MOZ_ASSERT(mState == State::Unconfigured);
  MOZ_ASSERT(!mDecoder);
  SetState(State::Configuring);

  RefPtr<layers::KnowsCompositor> knowsCompositor =
      layers::ImageBridgeChild::GetSingleton();

  // Bug 1839993: FFmpegDataDecoder ignores all decode errors when draining so
  // WPT cannot receive error callbacks. Forcibly enable LowLatency for now to
  // get the decoded results immediately to avoid this.
  auto params = CreateDecoderParams{
      *mInfo,
      CreateDecoderParams::OptionSet(
          CreateDecoderParams::Option::LowLatency,
          aPreferSoftwareDecoder
              ? CreateDecoderParams::Option::HardwareDecoderNotAllowed
              : CreateDecoderParams::Option::Default),
      mInfo->GetType(), mImageContainer, knowsCompositor};

  LOG("DecoderAgent #%d (%p) is creating a decoder - PreferSW: %s, "
      "low-latency: %syes",
      mId, this, aPreferSoftwareDecoder ? "yes" : "no",
      aLowLatency ? "" : "forcibly ");

  RefPtr<ConfigurePromise> p = mConfigurePromise.Ensure(__func__);

  mPDMFactory->CreateDecoder(params)
      ->Then(
          mOwnerThread, __func__,
          [self = RefPtr{this}](RefPtr<MediaDataDecoder>&& aDecoder) {
            self->mCreateRequest.Complete();

            // If DecoderAgent has been shut down while the decoder was being
            // created, shut the newly created decoder down and return.
            if (!self->mShutdownWhileCreationPromise.IsEmpty()) {
              MOZ_ASSERT(self->mState == State::ShuttingDown);
              MOZ_ASSERT(self->mConfigurePromise.IsEmpty(),
                         "configuration should have been rejected");

              LOGW(
                  "DecoderAgent #%d (%p) has been shut down. We need to shut "
                  "the newly created decoder down",
                  self->mId, self.get());
              aDecoder->Shutdown()->Then(
                  self->mOwnerThread, __func__,
                  [self](const ShutdownPromise::ResolveOrRejectValue& aValue) {
                    MOZ_ASSERT(self->mState == State::ShuttingDown);
                    LOGW(
                        "DecoderAgent #%d (%p), newly created decoder shutdown "
                        "has been %s",
                        self->mId, self.get(),
                        aValue.IsResolve() ? "resolved" : "rejected");
                    self->SetState(State::Unconfigured);
                    // Complete the Shutdown() call that was deferred until
                    // the in-flight decoder creation finished.
                    self->mShutdownWhileCreationPromise.ResolveOrReject(
                        aValue, __func__);
                  });
              return;
            }

            // Wrap the decoder in a proxy so its work runs on a dedicated
            // task queue.
            self->mDecoder = new MediaDataDecoderProxy(
                aDecoder.forget(),
                CreateMediaDecodeTaskQueue("DecoderAgent TaskQueue"));
            LOG("DecoderAgent #%d (%p) has created a decoder, now initialize "
                "it",
                self->mId, self.get());
            self->mDecoder->Init()
                ->Then(
                    self->mOwnerThread, __func__,
                    [self](const TrackInfo::TrackType aTrackType) {
                      self->mInitRequest.Complete();
                      LOG("DecoderAgent #%d (%p) has initialized the decoder",
                          self->mId, self.get());
                      MOZ_ASSERT(aTrackType == self->mInfo->GetType());
                      self->SetState(State::Configured);
                      self->mConfigurePromise.Resolve(true, __func__);
                    },
                    [self](const MediaResult& aError) {
                      self->mInitRequest.Complete();
                      LOGE(
                          "DecoderAgent #%d (%p) failed to initialize the "
                          "decoder",
                          self->mId, self.get());
                      self->SetState(State::Error);
                      self->mConfigurePromise.Reject(aError, __func__);
                    })
                ->Track(self->mInitRequest);
          },
          [self = RefPtr{this}](const MediaResult& aError) {
            self->mCreateRequest.Complete();
            LOGE("DecoderAgent #%d (%p) failed to create a decoder", self->mId,
                 self.get());

            // If DecoderAgent has been shut down, we need to resolve the
            // shutdown promise.
            if (!self->mShutdownWhileCreationPromise.IsEmpty()) {
              MOZ_ASSERT(self->mState == State::ShuttingDown);
              MOZ_ASSERT(self->mConfigurePromise.IsEmpty(),
                         "configuration should have been rejected");

              LOGW(
                  "DecoderAgent #%d (%p) has been shut down. Resolve the "
                  "shutdown promise right away since decoder creation failed",
                  self->mId, self.get());
              self->SetState(State::Unconfigured);
              self->mShutdownWhileCreationPromise.Resolve(true, __func__);
              return;
            }

            self->SetState(State::Error);
            self->mConfigurePromise.Reject(aError, __func__);
          })
      ->Track(mCreateRequest);

  return p;
}
// Cancels every in-flight operation (configure, decode, drain/flush) with
// NS_ERROR_DOM_MEDIA_CANCELED and shuts the underlying decoder down. Owner
// thread only. Requires that the decoder exists or its creation is in flight
// (i.e. Configure() has been called). Returns a promise resolved once the
// decoder has been shut down.
RefPtr<ShutdownPromise> DecoderAgent::Shutdown() {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());

  auto r =
      MediaResult(NS_ERROR_DOM_MEDIA_CANCELED, "Canceled by decoder shutdown");

  // If the decoder creation has not been completed yet, wait until the decoder
  // being created has been shut down.
  if (mCreateRequest.Exists()) {
    MOZ_ASSERT(!mInitRequest.Exists());
    MOZ_ASSERT(!mConfigurePromise.IsEmpty());
    MOZ_ASSERT(!mDecoder);
    MOZ_ASSERT(mState == State::Configuring);
    MOZ_ASSERT(mShutdownWhileCreationPromise.IsEmpty());

    LOGW(
        "DecoderAgent #%d (%p) shutdown while the decoder-creation for "
        "configuration is in flight. Reject the configuration now and defer "
        "the shutdown until the created decoder has been shut down",
        mId, this);

    // Reject the configuration in flight.
    mConfigurePromise.Reject(r, __func__);

    // Get the promise that will be resolved when the decoder being created
    // has been destroyed (resolved by Configure()'s creation callback).
    SetState(State::ShuttingDown);
    return mShutdownWhileCreationPromise.Ensure(__func__);
  }

  // If decoder creation has been completed, we must have the decoder now.
  MOZ_ASSERT(mDecoder);

  // Cancel pending initialization for configuration in flight if any.
  mInitRequest.DisconnectIfExists();
  mConfigurePromise.RejectIfExists(r, __func__);

  // Cancel decode in flight if any.
  mDecodeRequest.DisconnectIfExists();
  mDecodePromise.RejectIfExists(r, __func__);

  // Cancel flush-out in flight if any.
  mDrainRequest.DisconnectIfExists();
  mFlushRequest.DisconnectIfExists();
  mDryRequest.DisconnectIfExists();
  mDryPromise.RejectIfExists(r, __func__);
  mDrainAndFlushPromise.RejectIfExists(r, __func__);
  mDryData.Clear();
  mDrainAndFlushData.Clear();

  SetState(State::Unconfigured);

  // Move the decoder to a local so mDecoder is already cleared (satisfying
  // the destructor's assertion) when its Shutdown() runs.
  RefPtr<MediaDataDecoder> decoder = std::move(mDecoder);
  return decoder->Shutdown();
}
// Sends one sample to the decoder. Owner thread only; only one Decode() may
// be in flight at a time. Resolves with the decoded data or rejects with the
// decode error, in which case the agent enters the final Error state.
RefPtr<DecoderAgent::DecodePromise> DecoderAgent::Decode(
    MediaRawData* aSample) {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());
  MOZ_ASSERT(aSample);
  MOZ_ASSERT(mState == State::Configured || mState == State::Error);
  MOZ_ASSERT(mDecodePromise.IsEmpty());
  MOZ_ASSERT(!mDecodeRequest.Exists());

  // Errors are final: callers must create a new DecoderAgent instead.
  if (mState == State::Error) {
    LOGE("DecoderAgent #%d (%p) tried to decode in error state", mId, this);
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    "Cannot decode in error state"),
        __func__);
  }

  MOZ_ASSERT(mState == State::Configured);
  MOZ_ASSERT(mDecoder);
  SetState(State::Decoding);

  RefPtr<DecodePromise> p = mDecodePromise.Ensure(__func__);
  mDecoder->Decode(aSample)
      ->Then(
          mOwnerThread, __func__,
          [self = RefPtr{this}](MediaDataDecoder::DecodedData&& aData) {
            self->mDecodeRequest.Complete();
            LOGV("DecoderAgent #%d (%p) decode successfully", self->mId,
                 self.get());
            self->SetState(State::Configured);
            self->mDecodePromise.Resolve(std::move(aData), __func__);
          },
          [self = RefPtr{this}](const MediaResult& aError) {
            self->mDecodeRequest.Complete();
            LOGV("DecoderAgent #%d (%p) failed to decode", self->mId,
                 self.get());
            self->SetState(State::Error);
            self->mDecodePromise.Reject(aError, __func__);
          })
      ->Track(mDecodeRequest);
  return p;
}
// Implements WebCodecs flush(): drains all pending output from the decoder
// (via Dry()) and then flushes the decoder's internal state. Named
// DrainAndFlush() to distinguish it from MediaDataDecoder's own Flush().
// Owner thread only; resolves with all the drained output, or rejects (and
// enters the final Error state) if either the drain or the flush fails.
RefPtr<DecoderAgent::DecodePromise> DecoderAgent::DrainAndFlush() {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());
  MOZ_ASSERT(mState == State::Configured || mState == State::Error);
  MOZ_ASSERT(mDrainAndFlushPromise.IsEmpty());
  MOZ_ASSERT(mDrainAndFlushData.IsEmpty());
  MOZ_ASSERT(!mDryRequest.Exists());
  MOZ_ASSERT(mDryPromise.IsEmpty());
  MOZ_ASSERT(mDryData.IsEmpty());
  MOZ_ASSERT(!mDrainRequest.Exists());
  MOZ_ASSERT(!mFlushRequest.Exists());

  // Errors are final: callers must create a new DecoderAgent instead.
  if (mState == State::Error) {
    LOGE("DecoderAgent #%d (%p) tried to flush-out in error state", mId, this);
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    "Cannot flush in error state"),
        __func__);
  }

  MOZ_ASSERT(mState == State::Configured);
  MOZ_ASSERT(mDecoder);
  SetState(State::Flushing);

  RefPtr<DecoderAgent::DecodePromise> p =
      mDrainAndFlushPromise.Ensure(__func__);
  Dry()
      ->Then(
          mOwnerThread, __func__,
          [self = RefPtr{this}](MediaDataDecoder::DecodedData&& aData) {
            self->mDryRequest.Complete();
            LOG("DecoderAgent #%d (%p) has dried the decoder. Now flushing the "
                "decoder",
                self->mId, self.get());
            // Stash the drained output; it is handed to the caller once the
            // flush below completes.
            MOZ_ASSERT(self->mDrainAndFlushData.IsEmpty());
            self->mDrainAndFlushData.AppendElements(std::move(aData));
            self->mDecoder->Flush()
                ->Then(
                    self->mOwnerThread, __func__,
                    [self](const bool /* aUnUsed */) {
                      self->mFlushRequest.Complete();
                      LOG("DecoderAgent #%d (%p) has flushed the decoder",
                          self->mId, self.get());
                      self->SetState(State::Configured);
                      self->mDrainAndFlushPromise.Resolve(
                          std::move(self->mDrainAndFlushData), __func__);
                    },
                    [self](const MediaResult& aError) {
                      self->mFlushRequest.Complete();
                      LOGE("DecoderAgent #%d (%p) failed to flush the decoder",
                           self->mId, self.get());
                      self->SetState(State::Error);
                      // Drop the drained data; the whole operation failed.
                      self->mDrainAndFlushData.Clear();
                      self->mDrainAndFlushPromise.Reject(aError, __func__);
                    })
                ->Track(self->mFlushRequest);
          },
          [self = RefPtr{this}](const MediaResult& aError) {
            self->mDryRequest.Complete();
            LOGE("DecoderAgent #%d (%p) failed to dry the decoder", self->mId,
                 self.get());
            self->SetState(State::Error);
            self->mDrainAndFlushPromise.Reject(aError, __func__);
          })
      ->Track(mDryRequest);
  return p;
}
// Drains all pending output from the decoder's pipeline. Owner thread only,
// and only valid while a DrainAndFlush() is underway (State::Flushing).
// The returned promise is resolved by DrainUntilDry() with the accumulated
// output once the decoder produces no more data.
RefPtr<DecoderAgent::DecodePromise> DecoderAgent::Dry() {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());
  MOZ_ASSERT(mState == State::Flushing);
  MOZ_ASSERT(mDryPromise.IsEmpty());
  MOZ_ASSERT(!mDryRequest.Exists());
  MOZ_ASSERT(mDryData.IsEmpty());
  MOZ_ASSERT(mDecoder);

  RefPtr<DecodePromise> p = mDryPromise.Ensure(__func__);
  DrainUntilDry();
  return p;
}
// Repeatedly drains the decoder until it returns no more data, accumulating
// the decoded output in mDryData, then resolves mDryPromise with it. On a
// drain error the accumulated data is dropped and mDryPromise is rejected.
// TODO: MediaDataDecoder should provide a one-shot "drain fully" API instead
// of requiring the call site to re-issue Drain() until it runs dry.
void DecoderAgent::DrainUntilDry() {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());
  MOZ_ASSERT(mState == State::Flushing);
  MOZ_ASSERT(!mDryPromise.IsEmpty());
  MOZ_ASSERT(!mDrainRequest.Exists());
  MOZ_ASSERT(mDecoder);

  // Fixed typo in the log message ("drainng" -> "draining").
  LOG("DecoderAgent #%d (%p) is draining the decoder", mId, this);
  mDecoder->Drain()
      ->Then(
          mOwnerThread, __func__,
          [self = RefPtr{this}](MediaDataDecoder::DecodedData&& aData) {
            self->mDrainRequest.Complete();

            // An empty result means the decoder's pipeline is exhausted.
            if (aData.IsEmpty()) {
              LOG("DecoderAgent #%d (%p) is dry now", self->mId, self.get());
              self->mDryPromise.Resolve(std::move(self->mDryData), __func__);
              return;
            }

            LOG("DecoderAgent #%d (%p) drained %zu decoded data. Keep draining "
                "until dry",
                self->mId, self.get(), aData.Length());
            self->mDryData.AppendElements(std::move(aData));
            self->DrainUntilDry();
          },
          [self = RefPtr{this}](const MediaResult& aError) {
            self->mDrainRequest.Complete();
            // Include the agent id like every other log line in this file.
            LOGE("DecoderAgent #%d (%p) failed to drain decoder", self->mId,
                 self.get());
            self->mDryData.Clear();
            self->mDryPromise.Reject(aError, __func__);
          })
      ->Track(mDrainRequest);
}
// Transitions mState to aState, asserting (debug builds only) that the
// transition is one the agent's state machine allows, and logs the change.
void DecoderAgent::SetState(State aState) {
  MOZ_ASSERT(mOwnerThread->IsOnCurrentThread());

  // Legal transitions of the agent's state machine. Errors are final except
  // for the Error -> Unconfigured transition performed by Shutdown().
  auto isValidTransition = [](State aFrom, State aTo) -> bool {
    switch (aFrom) {
      case State::Unconfigured:
        return aTo == State::Configuring;
      case State::Configuring:
        return aTo == State::Configured || aTo == State::Error ||
               aTo == State::Unconfigured || aTo == State::ShuttingDown;
      case State::Configured:
        return aTo == State::Unconfigured || aTo == State::Decoding ||
               aTo == State::Flushing;
      case State::Decoding:
      case State::Flushing:
        return aTo == State::Configured || aTo == State::Error ||
               aTo == State::Unconfigured;
      case State::ShuttingDown:
      case State::Error:
        return aTo == State::Unconfigured;
      default:
        break;
    }
    MOZ_ASSERT_UNREACHABLE("Unhandled state transition");
    return false;
  };

  // Human-readable state name for the log line below.
  auto nameOf = [](State aState) -> const char* {
    switch (aState) {
      case State::Unconfigured:
        return "Unconfigured";
      case State::Configuring:
        return "Configuring";
      case State::Configured:
        return "Configured";
      case State::Decoding:
        return "Decoding";
      case State::Flushing:
        return "Flushing";
      case State::ShuttingDown:
        return "ShuttingDown";
      case State::Error:
        return "Error";
      default:
        break;
    }
    MOZ_ASSERT_UNREACHABLE("Unhandled state type");
    return "Unknown";
  };

  DebugOnly<bool> isAllowed = isValidTransition(mState, aState);
  MOZ_ASSERT(isAllowed);

  LOG("DecoderAgent #%d (%p) state change: %s -> %s", mId, this,
      nameOf(mState), nameOf(aState));
  mState = aState;
}
#undef LOG
#undef LOGW
#undef LOGE
#undef LOGV
#undef LOG_INTERNAL
} // namespace mozilla

Просмотреть файл

@ -1,117 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef DOM_MEDIA_WEBCODECS_DECODERAGENT_H
#define DOM_MEDIA_WEBCODECS_DECODERAGENT_H
#include "MediaResult.h"
#include "PlatformDecoderModule.h"
#include "mozilla/RefPtr.h"
#include "mozilla/TaskQueue.h"
#include "mozilla/UniquePtr.h"
class nsISerialEventTarget;
namespace mozilla {
class PDMFactory;
class TrackInfo;
namespace layers {
class ImageContainer;
} // namespace layers
// DecoderAgent is a wrapper that contains a MediaDataDecoder. It adapts the
// MediaDataDecoder APIs for use in WebCodecs.
//
// If Configure() is called, Shutdown() must be called to release the resources
// gracefully. Except Shutdown(), all the methods can't be called concurrently,
// meaning a method can only be called when the previous API call has completed.
// The responsibility of arranging the method calls is on the caller.
//
// When Shutdown() is called, all the operations in flight are canceled and the
// MediaDataDecoder is shut down. On the other hand, errors are final. A new
// DecoderAgent must be created when an error is encountered.
//
// All the methods need to be called on the DecoderAgent's owner thread. In
// WebCodecs, it's either on the main thread or worker thread.
class DecoderAgent final {
 public:
  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DecoderAgent);

  using Id = uint32_t;
  // `aInfo` describes the track to decode; the constructing thread becomes
  // the owner thread for all subsequent calls.
  DecoderAgent(Id aId, UniquePtr<TrackInfo>&& aInfo);

  // The following APIs are owner thread only.

  using ConfigurePromise = MozPromise<bool, MediaResult, true /* exclusive */>;
  // Creates and initializes the underlying MediaDataDecoder; resolves once
  // the decoder is ready, rejects on creation/initialization failure.
  RefPtr<ConfigurePromise> Configure(bool aPreferSoftwareDecoder,
                                     bool aLowLatency);
  // Cancels all in-flight operations and shuts the decoder down.
  RefPtr<ShutdownPromise> Shutdown();
  using DecodePromise = MediaDataDecoder::DecodePromise;
  // Decodes one sample; resolves with the decoded data.
  RefPtr<DecodePromise> Decode(MediaRawData* aSample);
  // WebCodecs's flush() flushes out all the pending decoded data in the
  // MediaDataDecoder so it is a combination of Drain and Flush. To distinguish
  // the term from MediaDataDecoder's one, we call it DrainAndFlush() here.
  RefPtr<DecodePromise> DrainAndFlush();

  const Id mId;  // A unique id.
  const UniquePtr<TrackInfo> mInfo;

 private:
  ~DecoderAgent();

  // Push out all the data in the MediaDataDecoder's pipeline.
  // TODO: MediaDataDecoder should implement this, instead of asking call site
  // to run `Drain` multiple times.
  RefPtr<DecodePromise> Dry();
  void DrainUntilDry();

  enum class State {
    Unconfigured,
    Configuring,
    Configured,
    Decoding,
    Flushing,
    ShuttingDown,
    Error,
  };
  // Asserts (debug-only) that the transition is legal, then updates mState.
  void SetState(State aState);

  const RefPtr<nsISerialEventTarget> mOwnerThread;
  const RefPtr<PDMFactory> mPDMFactory;
  const RefPtr<layers::ImageContainer> mImageContainer;
  RefPtr<MediaDataDecoder> mDecoder;
  State mState;

  // Configure
  MozPromiseHolder<ConfigurePromise> mConfigurePromise;
  using CreateDecoderPromise = PlatformDecoderModule::CreateDecoderPromise;
  MozPromiseRequestHolder<CreateDecoderPromise> mCreateRequest;
  using InitPromise = MediaDataDecoder::InitPromise;
  MozPromiseRequestHolder<InitPromise> mInitRequest;

  // Shutdown
  // Resolved when a decoder whose creation was still in flight when
  // Shutdown() was called has itself been shut down.
  MozPromiseHolder<ShutdownPromise> mShutdownWhileCreationPromise;

  // Decode
  MozPromiseHolder<DecodePromise> mDecodePromise;
  MozPromiseRequestHolder<DecodePromise> mDecodeRequest;

  // DrainAndFlush
  MozPromiseHolder<DecodePromise> mDrainAndFlushPromise;
  MediaDataDecoder::DecodedData mDrainAndFlushData;
  MozPromiseRequestHolder<DecodePromise> mDryRequest;
  MozPromiseHolder<DecodePromise> mDryPromise;
  MediaDataDecoder::DecodedData mDryData;
  MozPromiseRequestHolder<DecodePromise> mDrainRequest;
  using FlushPromise = MediaDataDecoder::FlushPromise;
  MozPromiseRequestHolder<FlushPromise> mFlushRequest;
};
} // namespace mozilla
#endif // DOM_MEDIA_WEBCODECS_DECODERAGENT_H

Просмотреть файл

@ -67,9 +67,6 @@ class EncodedVideoChunk final : public nsISupports, public nsWrapperCache {
const MaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer& aDestination,
ErrorResult& aRv);
// Non-webidl method.
uint8_t* Data() { return mBuffer.get(); }
private:
// EncodedVideoChunk can run on either main thread or worker thread.
void AssertIsOnOwningThread() const {

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -1,219 +0,0 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef mozilla_dom_VideoDecoder_h
#define mozilla_dom_VideoDecoder_h
#include <queue>
#include "js/TypeDecls.h"
#include "mozilla/Attributes.h"
#include "mozilla/DOMEventTargetHelper.h"
#include "mozilla/DecoderAgent.h"
#include "mozilla/ErrorResult.h"
#include "mozilla/Maybe.h"
#include "mozilla/RefPtr.h"
#include "mozilla/Result.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/dom/BindingDeclarations.h"
#include "nsCycleCollectionParticipant.h"
#include "nsStringFwd.h"
#include "nsWrapperCache.h"
class nsIGlobalObject;
namespace mozilla {
class MediaData;
class TrackInfo;
class VideoInfo;
namespace dom {
class EncodedVideoChunk;
class EventHandlerNonNull;
class GlobalObject;
class Promise;
class ThreadSafeWorkerRef;
class VideoFrameOutputCallback;
class WebCodecsErrorCallback;
enum class CodecState : uint8_t;
struct VideoDecoderConfig;
struct VideoDecoderInit;
} // namespace dom
namespace media {
class ShutdownBlockingTicket;
}
} // namespace mozilla
namespace mozilla::dom {
class ConfigureMessage;
class DecodeMessage;
class FlushMessage;
// Base class for the entries in VideoDecoder's control message queue
// ("configure", "decode", "flush"). The As*Message() accessors identify the
// concrete subtype without dynamic_cast.
class ControlMessage {
 public:
  explicit ControlMessage(const nsACString& aTitle);
  virtual ~ControlMessage() = default;
  // Cancels the asynchronous work associated with this message, if any.
  virtual void Cancel() = 0;
  // True while the message's asynchronous work is underway.
  virtual bool IsProcessing() = 0;

  virtual const nsCString& ToString() const { return mTitle; }
  virtual ConfigureMessage* AsConfigureMessage() { return nullptr; }
  virtual DecodeMessage* AsDecodeMessage() { return nullptr; }
  virtual FlushMessage* AsFlushMessage() { return nullptr; }

  const nsCString mTitle;  // Used to identify the message in the logs.
};
// WebCodecs VideoDecoder. Can run on either the main thread or a worker
// thread; all methods must be called on the owning thread. Control messages
// ("configure", "decode", "flush") are queued in mControlMessageQueue and
// processed in order, delegating the actual decoding work to a DecoderAgent.
class VideoDecoder final : public DOMEventTargetHelper {
 public:
  NS_DECL_ISUPPORTS_INHERITED
  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(VideoDecoder, DOMEventTargetHelper)
  IMPL_EVENT_HANDLER(dequeue)

 public:
  VideoDecoder(nsIGlobalObject* aParent,
               RefPtr<WebCodecsErrorCallback>&& aErrorCallback,
               RefPtr<VideoFrameOutputCallback>&& aOutputCallback);

 protected:
  ~VideoDecoder();

 public:
  JSObject* WrapObject(JSContext* aCx,
                       JS::Handle<JSObject*> aGivenProto) override;

  static already_AddRefed<VideoDecoder> Constructor(
      const GlobalObject& aGlobal, const VideoDecoderInit& aInit,
      ErrorResult& aRv);

  // WebIDL attributes and operations.
  CodecState State() const;

  uint32_t DecodeQueueSize() const;

  void Configure(const VideoDecoderConfig& aConfig, ErrorResult& aRv);

  void Decode(EncodedVideoChunk& aChunk, ErrorResult& aRv);

  already_AddRefed<Promise> Flush(ErrorResult& aRv);

  void Reset(ErrorResult& aRv);

  void Close(ErrorResult& aRv);

  static already_AddRefed<Promise> IsConfigSupported(
      const GlobalObject& aGlobal, const VideoDecoderConfig& aConfig,
      ErrorResult& aRv);

 private:
  // VideoDecoder can run on either main thread or worker thread.
  void AssertIsOnOwningThread() const { NS_ASSERT_OWNINGTHREAD(VideoDecoder); }

  // Internal counterparts of the WebIDL Reset()/Close(), taking the reason.
  Result<Ok, nsresult> Reset(const nsresult& aResult);

  Result<Ok, nsresult> Close(const nsresult& aResult);

  MOZ_CAN_RUN_SCRIPT void ReportError(const nsresult& aResult);
  MOZ_CAN_RUN_SCRIPT void OutputVideoFrames(
      nsTArray<RefPtr<MediaData>>&& aData);

  class ErrorRunnable;
  void ScheduleReportError(const nsresult& aResult);

  class OutputRunnable;
  void ScheduleOutputVideoFrames(nsTArray<RefPtr<MediaData>>&& aData,
                                 const nsACString& aLabel);

  void ScheduleClose(const nsresult& aResult);

  void ScheduleDequeueEvent();

  void SchedulePromiseResolveOrReject(already_AddRefed<Promise> aPromise,
                                      const nsresult& aResult);

  void ProcessControlMessageQueue();
  void CancelPendingControlMessages(const nsresult& aResult);

  enum class MessageProcessedResult { NotProcessed, Processed };

  MessageProcessedResult ProcessConfigureMessage(
      UniquePtr<ControlMessage>& aMessage);

  MessageProcessedResult ProcessDecodeMessage(
      UniquePtr<ControlMessage>& aMessage);

  MessageProcessedResult ProcessFlushMessage(
      UniquePtr<ControlMessage>& aMessage);

  // Returns true when mAgent can be created.
  bool CreateDecoderAgent(DecoderAgent::Id aId,
                          UniquePtr<VideoDecoderConfig>&& aConfig,
                          UniquePtr<TrackInfo>&& aInfo);
  void DestroyDecoderAgentIfAny();

  // Constant in practice, only set in ::Constructor.
  RefPtr<WebCodecsErrorCallback> mErrorCallback;
  RefPtr<VideoFrameOutputCallback> mOutputCallback;

  CodecState mState;

  bool mKeyChunkRequired;

  // NOTE(review): presumably set while an asynchronous step blocks processing
  // of the queue below — confirm against VideoDecoder.cpp.
  bool mMessageQueueBlocked;
  std::queue<UniquePtr<ControlMessage>> mControlMessageQueue;
  UniquePtr<ControlMessage> mProcessingMessage;

  // DecoderAgent will be created every time "configure" is being processed,
  // and will be destroyed when "reset" or another "configure" is called (spec
  // allows calling two "configure" without a "reset" in between).
  RefPtr<DecoderAgent> mAgent;
  UniquePtr<VideoDecoderConfig> mActiveConfig;
  uint32_t mDecodeQueueSize;
  bool mDequeueEventScheduled;

  // A unique id tracking the ConfigureMessage and will be used as the
  // DecoderAgent's Id.
  uint32_t mLatestConfigureId;
  // Tracks how many decode requests have been enqueued; this number is used
  // as the DecodeMessage's Id.
  size_t mDecodeCounter;
  // Tracks how many flush requests have been enqueued; this number is used
  // as the FlushMessage's Id.
  size_t mFlushCounter;

  // Used to add a nsIAsyncShutdownBlocker on main thread to block
  // xpcom-shutdown before the underlying MediaDataDecoder is created. The
  // blocker will be held until the underlying MediaDataDecoder has been shut
  // down. This blocker guarantees RemoteDecoderManagerChild's thread, where
  // the underlying RemoteMediaDataDecoder is on, outlives the
  // RemoteMediaDataDecoder, since the thread releasing, which happens on main
  // thread when getting a xpcom-shutdown signal, is blocked by the added
  // blocker. As a result, RemoteMediaDataDecoder can safely work on worker
  // thread with a holding blocker (otherwise, if RemoteDecoderManagerChild
  // releases its thread on main thread before RemoteMediaDataDecoder's
  // Shutdown() task run on worker thread, RemoteMediaDataDecoder has no
  // thread to run).
  UniquePtr<media::ShutdownBlockingTicket> mShutdownBlocker;

  // Held to make sure the dispatched tasks can be done before worker is going
  // away. As long as this worker-ref is held somewhere, the tasks dispatched
  // to the worker can be executed (otherwise the tasks would be canceled).
  // This ref should be activated as long as the underlying MediaDataDecoder
  // is alive, and should keep alive until mShutdownBlocker is dropped, so all
  // MediaDataDecoder's tasks and mShutdownBlocker-releasing task can be
  // executed.
  // TODO: Use StrongWorkerRef instead if this is always used in the same
  // thread?
  RefPtr<dom::ThreadSafeWorkerRef> mWorkerRef;
};
} // namespace mozilla::dom
#endif // mozilla_dom_VideoDecoder_h

Просмотреть файл

@ -40,21 +40,8 @@
#include "nsIPrincipal.h"
#include "nsIURI.h"
extern mozilla::LazyLogModule gWebCodecsLog;
namespace mozilla::dom {
#ifdef LOG_INTERNAL
# undef LOG_INTERNAL
#endif // LOG_INTERNAL
#define LOG_INTERNAL(level, msg, ...) \
MOZ_LOG(gWebCodecsLog, LogLevel::level, (msg, ##__VA_ARGS__))
#ifdef LOGW
# undef LOGW
#endif // LOGW
#define LOGW(msg, ...) LOG_INTERNAL(Warning, msg, ##__VA_ARGS__)
// Only needed for refcounted objects.
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VideoFrame, mParent)
NS_IMPL_CYCLE_COLLECTING_ADDREF(VideoFrame)
@ -64,6 +51,106 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(VideoFrame)
NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
/*
* The following are utilities to convert between VideoColorSpace values to
* gfx's values.
*/
// Maps a WebCodecs VideoMatrixCoefficients value to gfx::YUVColorSpace.
static gfx::YUVColorSpace ToColorSpace(VideoMatrixCoefficients aMatrix) {
  switch (aMatrix) {
    case VideoMatrixCoefficients::Rgb:
      return gfx::YUVColorSpace::Identity;
    case VideoMatrixCoefficients::Bt709:
      return gfx::YUVColorSpace::BT709;
    case VideoMatrixCoefficients::Bt470bg:
    case VideoMatrixCoefficients::Smpte170m:
      // Per ITU-T H.273, bt470bg (MatrixCoefficients 5) and smpte170m (6)
      // both specify the BT.601 matrix, so both map to BT601 (previously
      // bt470bg was incorrectly grouped with bt709).
      return gfx::YUVColorSpace::BT601;
    case VideoMatrixCoefficients::Bt2020_ncl:
      return gfx::YUVColorSpace::BT2020;
    case VideoMatrixCoefficients::EndGuard_:
      break;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoMatrixCoefficients");
  return gfx::YUVColorSpace::Default;
}
/*
 * Translates a WebCodecs VideoTransferCharacteristics value into the
 * corresponding gfx::TransferFunction. Linear has no gfx equivalent and,
 * like the end guard, falls through to the unreachable assertion.
 */
static gfx::TransferFunction ToTransferFunction(
    VideoTransferCharacteristics aTransfer) {
  switch (aTransfer) {
    case VideoTransferCharacteristics::Iec61966_2_1:
      return gfx::TransferFunction::SRGB;
    case VideoTransferCharacteristics::Bt709:
    case VideoTransferCharacteristics::Smpte170m:
      return gfx::TransferFunction::BT709;
    case VideoTransferCharacteristics::Hlg:
      return gfx::TransferFunction::HLG;
    case VideoTransferCharacteristics::Pq:
      return gfx::TransferFunction::PQ;
    case VideoTransferCharacteristics::Linear:
    case VideoTransferCharacteristics::EndGuard_:
      break;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoTransferCharacteristics");
  return gfx::TransferFunction::Default;
}
// Maps a WebCodecs VideoColorPrimaries value to gfx::ColorSpace2.
static gfx::ColorSpace2 ToPrimaries(VideoColorPrimaries aPrimaries) {
  switch (aPrimaries) {
    case VideoColorPrimaries::Bt709:
      return gfx::ColorSpace2::BT709;
    case VideoColorPrimaries::Bt470bg:
      return gfx::ColorSpace2::BT601_625;
    case VideoColorPrimaries::Smpte170m:
      return gfx::ColorSpace2::BT601_525;
    case VideoColorPrimaries::Bt2020:
      return gfx::ColorSpace2::BT2020;
    case VideoColorPrimaries::Smpte432:
      return gfx::ColorSpace2::DISPLAY_P3;
    case VideoColorPrimaries::EndGuard_:
      break;
  }
  // Fixed copy-paste error: this assertion previously named
  // VideoTransferCharacteristics instead of VideoColorPrimaries.
  MOZ_ASSERT_UNREACHABLE("unsupported VideoColorPrimaries");
  return gfx::ColorSpace2::UNKNOWN;
}
/*
 * Converts a gfx::SurfaceFormat into its WebCodecs VideoPixelFormat
 * counterpart. Surface formats without a WebCodecs equivalent yield
 * Nothing().
 */
static Maybe<VideoPixelFormat> ToVideoPixelFormat(gfx::SurfaceFormat aFormat) {
  switch (aFormat) {
    case gfx::SurfaceFormat::R8G8B8A8:
      return Some(VideoPixelFormat::RGBA);
    case gfx::SurfaceFormat::R8G8B8X8:
      return Some(VideoPixelFormat::RGBX);
    case gfx::SurfaceFormat::B8G8R8A8:
      return Some(VideoPixelFormat::BGRA);
    case gfx::SurfaceFormat::B8G8R8X8:
      return Some(VideoPixelFormat::BGRX);
    case gfx::SurfaceFormat::NV12:
      return Some(VideoPixelFormat::NV12);
    default:
      return Nothing();
  }
}
/*
 * Converts an ImageBitmapFormat into its WebCodecs VideoPixelFormat
 * counterpart. Bitmap formats without a WebCodecs equivalent yield
 * Nothing().
 */
static Maybe<VideoPixelFormat> ToVideoPixelFormat(ImageBitmapFormat aFormat) {
  switch (aFormat) {
    case ImageBitmapFormat::RGBA32:
      return Some(VideoPixelFormat::RGBA);
    case ImageBitmapFormat::BGRA32:
      return Some(VideoPixelFormat::BGRA);
    case ImageBitmapFormat::YUV420P:
      return Some(VideoPixelFormat::I420);
    case ImageBitmapFormat::YUV422P:
      return Some(VideoPixelFormat::I422);
    case ImageBitmapFormat::YUV444P:
      return Some(VideoPixelFormat::I444);
    case ImageBitmapFormat::YUV420SP_NV12:
      return Some(VideoPixelFormat::NV12);
    default:
      return Nothing();
  }
}
/*
* The following are helpers to read the image data from the given buffer and
* the format. The data layout is illustrated in the comments for
@ -367,12 +454,9 @@ ValidateVideoFrameBufferInit(const VideoFrameBufferInit& aInit) {
// https://w3c.github.io/webcodecs/#videoframe-verify-rect-offset-alignment
static Result<Ok, nsCString> VerifyRectOffsetAlignment(
const Maybe<VideoFrame::Format>& aFormat, const gfx::IntRect& aRect) {
if (!aFormat) {
return Ok();
}
for (const VideoFrame::Format::Plane& p : aFormat->Planes()) {
const gfx::IntSize sample = aFormat->SampleSize(p);
const VideoFrame::Format& aFormat, const gfx::IntRect& aRect) {
for (const VideoFrame::Format::Plane& p : aFormat.Planes()) {
const gfx::IntSize sample = aFormat.SampleSize(p);
if (aRect.X() % sample.Width() != 0) {
return Err(nsCString("Mismatch between format and given left offset"));
}
@ -399,7 +483,7 @@ static Result<gfx::IntRect, nsCString> ParseVisibleRect(
rect = *aOverrideRect;
}
MOZ_TRY(VerifyRectOffsetAlignment(Some(aFormat), rect));
MOZ_TRY(VerifyRectOffsetAlignment(aFormat, rect));
return rect;
}
@ -645,7 +729,7 @@ static VideoColorSpaceInit PickColorSpace(
// https://w3c.github.io/webcodecs/#validate-videoframeinit
static Result<std::pair<Maybe<gfx::IntRect>, Maybe<gfx::IntSize>>, nsCString>
ValidateVideoFrameInit(const VideoFrameInit& aInit,
const Maybe<VideoFrame::Format>& aFormat,
const VideoFrame::Format& aFormat,
const gfx::IntSize& aCodedSize) {
if (aCodedSize.Width() <= 0 || aCodedSize.Height() <= 0) {
return Err(nsCString("codedWidth and codedHeight must be positive"));
@ -790,8 +874,8 @@ static Result<RefPtr<layers::Image>, nsCString> CreateYUVImageFromBuffer(
data.mCbCrStride = reader->mStrideU;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
// Color settings.
if (!aColorSpace.mFullRange.IsNull()) {
data.mColorRange = ToColorRange(aColorSpace.mFullRange.Value());
if (!aColorSpace.mFullRange.IsNull() && aColorSpace.mFullRange.Value()) {
data.mColorRange = gfx::ColorRange::FULL;
}
MOZ_RELEASE_ASSERT(!aColorSpace.mMatrix.IsNull());
data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.Value());
@ -834,8 +918,8 @@ static Result<RefPtr<layers::Image>, nsCString> CreateYUVImageFromBuffer(
data.mCbCrStride = reader.mStrideUV;
data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
// Color settings.
if (!aColorSpace.mFullRange.IsNull()) {
data.mColorRange = ToColorRange(aColorSpace.mFullRange.Value());
if (!aColorSpace.mFullRange.IsNull() && aColorSpace.mFullRange.Value()) {
data.mColorRange = gfx::ColorRange::FULL;
}
MOZ_RELEASE_ASSERT(!aColorSpace.mMatrix.IsNull());
data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.Value());
@ -958,7 +1042,7 @@ static Result<RefPtr<VideoFrame>, nsCString> CreateVideoFrameFromBuffer(
// TODO: Spec should assign aInit.mFormat to inner format value:
// https://github.com/w3c/webcodecs/issues/509.
// This comment should be removed once the issue is resolved.
return MakeRefPtr<VideoFrame>(aGlobal, data, Some(aInit.mFormat), codedSize,
return MakeRefPtr<VideoFrame>(aGlobal, data, aInit.mFormat, codedSize,
parsedRect,
displaySize ? *displaySize : parsedRect.Size(),
duration, aInit.mTimestamp, colorSpace);
@ -1013,22 +1097,24 @@ InitializeFrameWithResourceAndSize(
RefPtr<gfx::SourceSurface> surface = image->GetAsSourceSurface();
Maybe<VideoFrame::Format> format =
SurfaceFormatToVideoPixelFormat(surface->GetFormat())
ToVideoPixelFormat(surface->GetFormat())
.map([](const VideoPixelFormat& aFormat) {
return VideoFrame::Format(aFormat);
});
// TODO: Handle `ToVideoPixelFormat` failure.
if (NS_WARN_IF(!format)) {
return Err(nsCString("This image has unsupport format"));
}
std::pair<Maybe<gfx::IntRect>, Maybe<gfx::IntSize>> init;
MOZ_TRY_VAR(init, ValidateVideoFrameInit(aInit, format, image->GetSize()));
MOZ_TRY_VAR(init,
ValidateVideoFrameInit(aInit, format.ref(), image->GetSize()));
Maybe<gfx::IntRect> visibleRect = init.first;
Maybe<gfx::IntSize> displaySize = init.second;
if (format && aInit.mAlpha == AlphaOption::Discard) {
if (aInit.mAlpha == AlphaOption::Discard) {
format->MakeOpaque();
// Keep the alpha data in image for now until it's being rendered.
// TODO: The alpha will still be rendered if the format is unrecognized
// since no additional flag keeping this request. Should spec address what
// to do in this case?
}
InitializeVisibleRectAndDisplaySize(visibleRect, displaySize,
@ -1040,10 +1126,10 @@ InitializeFrameWithResourceAndSize(
// TODO: WPT will fail if we guess a VideoColorSpace here.
const VideoColorSpaceInit colorSpace{};
return MakeAndAddRef<VideoFrame>(
aGlobal, image, format ? Some(format->PixelFormat()) : Nothing(),
image->GetSize(), visibleRect.value(), displaySize.value(), duration,
aInit.mTimestamp.Value(), colorSpace);
return MakeAndAddRef<VideoFrame>(aGlobal, image, format->PixelFormat(),
image->GetSize(), visibleRect.value(),
displaySize.value(), duration,
aInit.mTimestamp.Value(), colorSpace);
}
// https://w3c.github.io/webcodecs/#videoframe-initialize-frame-from-other-frame
@ -1053,14 +1139,10 @@ InitializeFrameFromOtherFrame(nsIGlobalObject* aGlobal, VideoFrameData&& aData,
MOZ_ASSERT(aGlobal);
MOZ_ASSERT(aData.mImage);
Maybe<VideoFrame::Format> format =
aData.mFormat ? Some(VideoFrame::Format(*aData.mFormat)) : Nothing();
if (format && aInit.mAlpha == AlphaOption::Discard) {
format->MakeOpaque();
VideoFrame::Format format(aData.mFormat);
if (aInit.mAlpha == AlphaOption::Discard) {
format.MakeOpaque();
// Keep the alpha data in image for now until it's being rendered.
// TODO: The alpha will still be rendered if the format is unrecognized
// since no additional flag keeping this request. Should spec address what
// to do in this case?
}
std::pair<Maybe<gfx::IntRect>, Maybe<gfx::IntSize>> init;
@ -1079,9 +1161,8 @@ InitializeFrameFromOtherFrame(nsIGlobalObject* aGlobal, VideoFrameData&& aData,
: aData.mTimestamp;
return MakeAndAddRef<VideoFrame>(
aGlobal, aData.mImage, format ? Some(format->PixelFormat()) : Nothing(),
aData.mImage->GetSize(), *visibleRect, *displaySize, duration, timestamp,
aData.mColorSpace);
aGlobal, aData.mImage, format.PixelFormat(), aData.mImage->GetSize(),
*visibleRect, *displaySize, duration, timestamp, aData.mColorSpace);
}
/*
@ -1089,7 +1170,7 @@ InitializeFrameFromOtherFrame(nsIGlobalObject* aGlobal, VideoFrameData&& aData,
*/
VideoFrameData::VideoFrameData(layers::Image* aImage,
const Maybe<VideoPixelFormat>& aFormat,
const VideoPixelFormat& aFormat,
gfx::IntRect aVisibleRect,
gfx::IntSize aDisplaySize,
Maybe<uint64_t> aDuration, int64_t aTimestamp,
@ -1103,7 +1184,7 @@ VideoFrameData::VideoFrameData(layers::Image* aImage,
mColorSpace(aColorSpace) {}
VideoFrameSerializedData::VideoFrameSerializedData(
layers::Image* aImage, const Maybe<VideoPixelFormat>& aFormat,
layers::Image* aImage, const VideoPixelFormat& aFormat,
gfx::IntSize aCodedSize, gfx::IntRect aVisibleRect,
gfx::IntSize aDisplaySize, Maybe<uint64_t> aDuration, int64_t aTimestamp,
const VideoColorSpaceInit& aColorSpace,
@ -1116,14 +1197,15 @@ VideoFrameSerializedData::VideoFrameSerializedData(
/*
* W3C Webcodecs VideoFrame implementation
*/
VideoFrame::VideoFrame(nsIGlobalObject* aParent,
const RefPtr<layers::Image>& aImage,
const Maybe<VideoPixelFormat>& aFormat,
gfx::IntSize aCodedSize, gfx::IntRect aVisibleRect,
gfx::IntSize aDisplaySize,
const VideoPixelFormat& aFormat, gfx::IntSize aCodedSize,
gfx::IntRect aVisibleRect, gfx::IntSize aDisplaySize,
const Maybe<uint64_t>& aDuration, int64_t aTimestamp,
const VideoColorSpaceInit& aColorSpace)
: mParent(aParent),
mResource(Some(Resource(aImage, VideoFrame::Format(aFormat)))),
mCodedSize(aCodedSize),
mVisibleRect(aVisibleRect),
mDisplaySize(aDisplaySize),
@ -1131,13 +1213,6 @@ VideoFrame::VideoFrame(nsIGlobalObject* aParent,
mTimestamp(aTimestamp),
mColorSpace(aColorSpace) {
MOZ_ASSERT(mParent);
mResource.emplace(
Resource(aImage, aFormat.map([](const VideoPixelFormat& aPixelFormat) {
return VideoFrame::Format(aPixelFormat);
})));
if (!mResource->mFormat) {
LOGW("Create a VideoFrame with an unrecognized image format");
}
}
VideoFrame::VideoFrame(const VideoFrame& aOther)
@ -1368,13 +1443,16 @@ already_AddRefed<VideoFrame> VideoFrame::Constructor(
}
const ImageUtils imageUtils(image);
Maybe<VideoPixelFormat> format =
ImageBitmapFormatToVideoPixelFormat(imageUtils.GetFormat());
Maybe<VideoPixelFormat> format = ToVideoPixelFormat(imageUtils.GetFormat());
if (!format) {
aRv.ThrowTypeError("The video's image is in unsupported format");
return nullptr;
}
// TODO: Retrive/infer the duration, and colorspace.
auto r = InitializeFrameFromOtherFrame(
global.get(),
VideoFrameData(image.get(), format, image->GetPictureRect(),
VideoFrameData(image.get(), format.ref(), image->GetPictureRect(),
image->GetSize(), Nothing(),
static_cast<int64_t>(aVideoElement.CurrentTime()), {}),
aInit);
@ -1511,7 +1589,7 @@ already_AddRefed<VideoFrame> VideoFrame::Constructor(
auto r = InitializeFrameFromOtherFrame(
global.get(),
VideoFrameData(aVideoFrame.mResource->mImage.get(),
aVideoFrame.mResource->TryPixelFormat(),
aVideoFrame.mResource->mFormat.PixelFormat(),
aVideoFrame.mVisibleRect, aVideoFrame.mDisplaySize,
aVideoFrame.mDuration, aVideoFrame.mTimestamp,
aVideoFrame.mColorSpace),
@ -1544,8 +1622,8 @@ already_AddRefed<VideoFrame> VideoFrame::Constructor(
Nullable<VideoPixelFormat> VideoFrame::GetFormat() const {
AssertIsOnOwningThread();
return mResource && mResource->mFormat
? Nullable<VideoPixelFormat>(mResource->mFormat->PixelFormat())
return mResource
? Nullable<VideoPixelFormat>(mResource->mFormat.PixelFormat())
: Nullable<VideoPixelFormat>();
}
@ -1631,13 +1709,8 @@ uint32_t VideoFrame::AllocationSize(const VideoFrameCopyToOptions& aOptions,
return 0;
}
if (!mResource->mFormat) {
aRv.ThrowAbortError("The VideoFrame image format is not VideoPixelFormat");
return 0;
}
auto r = ParseVideoFrameCopyToOptions(aOptions, mVisibleRect, mCodedSize,
mResource->mFormat.ref());
mResource->mFormat);
if (r.isErr()) {
// TODO: Should throw layout.
aRv.ThrowTypeError(r.unwrapErr());
@ -1659,11 +1732,6 @@ already_AddRefed<Promise> VideoFrame::CopyTo(
return nullptr;
}
if (!mResource->mFormat) {
aRv.ThrowNotSupportedError("VideoFrame's image format is unrecognized");
return nullptr;
}
RefPtr<Promise> p = Promise::Create(mParent.get(), aRv);
if (NS_WARN_IF(aRv.Failed())) {
return p.forget();
@ -1671,7 +1739,7 @@ already_AddRefed<Promise> VideoFrame::CopyTo(
CombinedBufferLayout layout;
auto r1 = ParseVideoFrameCopyToOptions(aOptions, mVisibleRect, mCodedSize,
mResource->mFormat.ref());
mResource->mFormat);
if (r1.isErr()) {
// TODO: Should reject with layout.
p->MaybeRejectWithTypeError(r1.unwrapErr());
@ -1693,7 +1761,7 @@ already_AddRefed<Promise> VideoFrame::CopyTo(
Sequence<PlaneLayout> planeLayouts;
nsTArray<Format::Plane> planes = mResource->mFormat->Planes();
nsTArray<Format::Plane> planes = mResource->mFormat.Planes();
MOZ_ASSERT(layout.mComputedLayouts.Length() == planes.Length());
// TODO: These jobs can be run in a thread pool (bug 1780656) to unblock the
@ -1713,17 +1781,17 @@ already_AddRefed<Promise> VideoFrame::CopyTo(
// Copy pixels of `size` starting from `origin` on planes[i] to
// `aDestination`.
gfx::IntPoint origin(
l.mSourceLeftBytes / mResource->mFormat->SampleBytes(planes[i]),
l.mSourceLeftBytes / mResource->mFormat.SampleBytes(planes[i]),
l.mSourceTop);
gfx::IntSize size(
l.mSourceWidthBytes / mResource->mFormat->SampleBytes(planes[i]),
l.mSourceWidthBytes / mResource->mFormat.SampleBytes(planes[i]),
l.mSourceHeight);
if (!mResource->CopyTo(planes[i], {origin, size},
buffer.From(destinationOffset),
static_cast<size_t>(l.mDestinationStride))) {
p->MaybeRejectWithTypeError(
nsPrintfCString("Failed to copy image data in %s plane",
mResource->mFormat->PlaneName(planes[i])));
mResource->mFormat.PlaneName(planes[i])));
return p.forget();
}
}
@ -1802,7 +1870,7 @@ bool VideoFrame::WriteStructuredClone(JSStructuredCloneWriter* aWriter,
// The serialization is limited to the same process scope so it's ok to
// serialize a reference instead of a copy.
aHolder->VideoFrames().AppendElement(VideoFrameSerializedData(
image.get(), mResource->TryPixelFormat(), mCodedSize, mVisibleRect,
image.get(), mResource->mFormat.PixelFormat(), mCodedSize, mVisibleRect,
mDisplaySize, mDuration, mTimestamp, mColorSpace, GetPrincipalURI()));
return !NS_WARN_IF(!JS_WriteUint32Pair(aWriter, SCTAG_DOM_VIDEOFRAME, index));
@ -1818,7 +1886,7 @@ UniquePtr<VideoFrame::TransferredData> VideoFrame::Transfer() {
Resource r = mResource.extract();
auto frame = MakeUnique<TransferredData>(
r.mImage.get(), r.TryPixelFormat(), mCodedSize, mVisibleRect,
r.mImage.get(), r.mFormat.PixelFormat(), mCodedSize, mVisibleRect,
mDisplaySize, mDuration, mTimestamp, mColorSpace, GetPrincipalURI());
Close();
return frame;
@ -2142,7 +2210,7 @@ bool VideoFrame::Format::IsYUV() const { return IsYUVFormat(mFormat); }
*/
VideoFrame::Resource::Resource(const RefPtr<layers::Image>& aImage,
Maybe<class Format>&& aFormat)
const class Format& aFormat)
: mImage(aImage), mFormat(aFormat) {
MOZ_ASSERT(mImage);
}
@ -2152,18 +2220,12 @@ VideoFrame::Resource::Resource(const Resource& aOther)
MOZ_ASSERT(mImage);
}
Maybe<VideoPixelFormat> VideoFrame::Resource::TryPixelFormat() const {
return mFormat ? Some(mFormat->PixelFormat()) : Nothing();
}
uint32_t VideoFrame::Resource::Stride(const Format::Plane& aPlane) const {
MOZ_RELEASE_ASSERT(mFormat);
CheckedInt<uint32_t> width(mImage->GetSize().Width());
switch (aPlane) {
case Format::Plane::Y: // and RGBA
case Format::Plane::A:
switch (mFormat->PixelFormat()) {
switch (mFormat.PixelFormat()) {
case VideoPixelFormat::I420:
case VideoPixelFormat::I420A:
case VideoPixelFormat::I422:
@ -2173,20 +2235,20 @@ uint32_t VideoFrame::Resource::Stride(const Format::Plane& aPlane) const {
case VideoPixelFormat::RGBX:
case VideoPixelFormat::BGRA:
case VideoPixelFormat::BGRX:
return (width * mFormat->SampleBytes(aPlane)).value();
return (width * mFormat.SampleBytes(aPlane)).value();
case VideoPixelFormat::EndGuard_:
MOZ_ASSERT_UNREACHABLE("invalid format");
}
return 0;
case Format::Plane::U: // and UV
case Format::Plane::V:
switch (mFormat->PixelFormat()) {
switch (mFormat.PixelFormat()) {
case VideoPixelFormat::I420:
case VideoPixelFormat::I420A:
case VideoPixelFormat::I422:
case VideoPixelFormat::I444:
case VideoPixelFormat::NV12:
return (((width + 1) / 2) * mFormat->SampleBytes(aPlane)).value();
return (((width + 1) / 2) * mFormat.SampleBytes(aPlane)).value();
case VideoPixelFormat::RGBA:
case VideoPixelFormat::RGBX:
case VideoPixelFormat::BGRA:
@ -2204,22 +2266,18 @@ bool VideoFrame::Resource::CopyTo(const Format::Plane& aPlane,
const gfx::IntRect& aRect,
Span<uint8_t>&& aPlaneDest,
size_t aDestinationStride) const {
if (!mFormat) {
return false;
}
auto copyPlane = [&](const uint8_t* aPlaneData) {
MOZ_ASSERT(aPlaneData);
CheckedInt<size_t> offset(aRect.Y());
offset *= Stride(aPlane);
offset += aRect.X() * mFormat->SampleBytes(aPlane);
offset += aRect.X() * mFormat.SampleBytes(aPlane);
if (!offset.isValid()) {
return false;
}
CheckedInt<size_t> elementsBytes(aRect.Width());
elementsBytes *= mFormat->SampleBytes(aPlane);
elementsBytes *= mFormat.SampleBytes(aPlane);
if (!elementsBytes.isValid()) {
return false;
}
@ -2264,7 +2322,7 @@ bool VideoFrame::Resource::CopyTo(const Format::Plane& aPlane,
// BGRA). To get the data in the matched format, we create a temp buffer
// holding the image data in that format and then copy them to
// `aDestination`.
const gfx::SurfaceFormat f = mFormat->ToSurfaceFormat();
const gfx::SurfaceFormat f = mFormat.ToSurfaceFormat();
MOZ_ASSERT(f == gfx::SurfaceFormat::R8G8B8A8 ||
f == gfx::SurfaceFormat::R8G8B8X8 ||
f == gfx::SurfaceFormat::B8G8R8A8 ||
@ -2307,7 +2365,7 @@ bool VideoFrame::Resource::CopyTo(const Format::Plane& aPlane,
case Format::Plane::V:
return copyPlane(mImage->AsPlanarYCbCrImage()->GetData()->mCrChannel);
case Format::Plane::A:
MOZ_ASSERT(mFormat->PixelFormat() == VideoPixelFormat::I420A);
MOZ_ASSERT(mFormat.PixelFormat() == VideoPixelFormat::I420A);
MOZ_ASSERT(mImage->AsPlanarYCbCrImage()->GetData()->mAlpha);
return copyPlane(
mImage->AsPlanarYCbCrImage()->GetData()->mAlpha->mChannel);
@ -2327,13 +2385,7 @@ bool VideoFrame::Resource::CopyTo(const Format::Plane& aPlane,
}
}
// TODO: ImageFormat::MAC_IOSURFACE or ImageFormat::DMABUF
LOGW("Cannot copy image data of an unrecognized format");
return false;
}
#undef LOGW
#undef LOG_INTERNAL
} // namespace mozilla::dom

Просмотреть файл

@ -55,13 +55,13 @@ struct VideoFrameCopyToOptions;
namespace mozilla::dom {
struct VideoFrameData {
VideoFrameData(layers::Image* aImage, const Maybe<VideoPixelFormat>& aFormat,
VideoFrameData(layers::Image* aImage, const VideoPixelFormat& aFormat,
gfx::IntRect aVisibleRect, gfx::IntSize aDisplaySize,
Maybe<uint64_t> aDuration, int64_t aTimestamp,
const VideoColorSpaceInit& aColorSpace);
const RefPtr<layers::Image> mImage;
const Maybe<VideoPixelFormat> mFormat;
const VideoPixelFormat mFormat;
const gfx::IntRect mVisibleRect;
const gfx::IntSize mDisplaySize;
const Maybe<uint64_t> mDuration;
@ -71,7 +71,7 @@ struct VideoFrameData {
struct VideoFrameSerializedData : VideoFrameData {
VideoFrameSerializedData(layers::Image* aImage,
const Maybe<VideoPixelFormat>& aFormat,
const VideoPixelFormat& aFormat,
gfx::IntSize aCodedSize, gfx::IntRect aVisibleRect,
gfx::IntSize aDisplaySize, Maybe<uint64_t> aDuration,
int64_t aTimestamp,
@ -89,7 +89,7 @@ class VideoFrame final : public nsISupports, public nsWrapperCache {
public:
VideoFrame(nsIGlobalObject* aParent, const RefPtr<layers::Image>& aImage,
const Maybe<VideoPixelFormat>& aFormat, gfx::IntSize aCodedSize,
const VideoPixelFormat& aFormat, gfx::IntSize aCodedSize,
gfx::IntRect aVisibleRect, gfx::IntSize aDisplaySize,
const Maybe<uint64_t>& aDuration, int64_t aTimestamp,
const VideoColorSpaceInit& aColorSpace);
@ -216,17 +216,15 @@ class VideoFrame final : public nsISupports, public nsWrapperCache {
// A class representing the VideoFrame's data.
class Resource final {
public:
Resource(const RefPtr<layers::Image>& aImage, Maybe<Format>&& aFormat);
Resource(const RefPtr<layers::Image>& aImage, const Format& aFormat);
Resource(const Resource& aOther);
~Resource() = default;
Maybe<VideoPixelFormat> TryPixelFormat() const;
uint32_t Stride(const Format::Plane& aPlane) const;
bool CopyTo(const Format::Plane& aPlane, const gfx::IntRect& aRect,
Span<uint8_t>&& aPlaneDest, size_t aDestinationStride) const;
const RefPtr<layers::Image> mImage;
// Nothing() if mImage is not in VideoPixelFormat
const Maybe<Format> mFormat;
const Format mFormat;
};
nsCOMPtr<nsIGlobalObject> mParent;

Просмотреть файл

@ -6,42 +6,12 @@
#include "WebCodecsUtils.h"
#include "VideoUtils.h"
#include "js/experimental/TypedData.h"
#include "mozilla/Assertions.h"
#include "mozilla/CheckedInt.h"
#include "mozilla/dom/ImageBitmapBinding.h"
#include "mozilla/dom/VideoColorSpaceBinding.h"
#include "mozilla/dom/VideoFrameBinding.h"
#include "mozilla/gfx/Types.h"
#include "nsDebug.h"
namespace mozilla::dom {
/*
* The followings are helpers for VideoDecoder methods
*/
// Best-effort guess of the container formats that commonly carry the given
// codec string. An empty array means the codec was not recognized.
nsTArray<nsCString> GuessContainers(const nsString& aCodec) {
  nsTArray<nsCString> containers;
  if (IsAV1CodecString(aCodec)) {
    containers.AppendElement("mp4"_ns);
    containers.AppendElement("webm"_ns);
  } else if (IsVP9CodecString(aCodec)) {
    containers.AppendElement("mp4"_ns);
    containers.AppendElement("webm"_ns);
    containers.AppendElement("ogg"_ns);
  } else if (IsVP8CodecString(aCodec)) {
    containers.AppendElement("webm"_ns);
    containers.AppendElement("ogg"_ns);
    containers.AppendElement("3gpp"_ns);
    containers.AppendElement("3gpp2"_ns);
    containers.AppendElement("3gp2"_ns);
  } else if (IsH264CodecString(aCodec)) {
    containers.AppendElement("mp4"_ns);
    containers.AppendElement("3gpp"_ns);
    containers.AppendElement("3gpp2"_ns);
    containers.AppendElement("3gp2"_ns);
  }
  return containers;
}
/*
* The below are helpers to operate ArrayBuffer or ArrayBufferView.
*/
@ -79,226 +49,4 @@ Result<Span<uint8_t>, nsresult> GetSharedArrayBufferData(
return GetArrayBufferData(aBuffer.GetAsArrayBuffer());
}
// Resolves the (buffer object, byte offset, byte length) triple for an
// ArrayBuffer-or-ArrayBufferView union.
// - ArrayBuffer: offset is 0 and length is the buffer's byte length; if the
//   byte-length computation overflows, a null object with zero offset/length
//   is returned instead.
// - ArrayBufferView: the view's backing ArrayBuffer is returned together
//   with the view's byte offset and byte length within that buffer.
static std::tuple<JS::ArrayBufferOrView, size_t, size_t> GetArrayBufferInfo(
JSContext* aCx,
const OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer& aBuffer) {
if (aBuffer.IsArrayBuffer()) {
const ArrayBuffer& buffer = aBuffer.GetAsArrayBuffer();
// Refresh the cached data pointer/length before reading Length().
buffer.ComputeState();
CheckedInt<size_t> byteLength(buffer.Length());
byteLength *= sizeof(JS::ArrayBuffer::DataType);
return byteLength.isValid()
? std::make_tuple(
JS::ArrayBufferOrView::fromObject(buffer.Obj()), (size_t)0,
byteLength.value())
: std::make_tuple(JS::ArrayBufferOrView::fromObject(nullptr),
(size_t)0, (size_t)0);
}
MOZ_ASSERT(aBuffer.IsArrayBufferView());
const ArrayBufferView& view = aBuffer.GetAsArrayBufferView();
bool isSharedMemory;
// Root the view object while resolving its backing buffer, since the JSAPI
// calls below can GC.
JS::Rooted<JSObject*> obj(aCx, view.Obj());
return std::make_tuple(
JS::ArrayBufferOrView::fromObject(
JS_GetArrayBufferViewBuffer(aCx, obj, &isSharedMemory)),
JS_GetTypedArrayByteOffset(obj), JS_GetTypedArrayByteLength(obj));
}
// Clones the bytes referenced by the given ArrayBuffer/ArrayBufferView into
// a brand-new ArrayBuffer and wraps it back into the same union type.
// Returns NS_ERROR_UNEXPECTED if the source buffer cannot be resolved or the
// clone cannot be wrapped into the union, and NS_ERROR_OUT_OF_MEMORY if the
// clone allocation fails.
Result<OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer, nsresult>
CloneBuffer(
JSContext* aCx,
const OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer& aBuffer) {
std::tuple<JS::ArrayBufferOrView, size_t, size_t> info =
GetArrayBufferInfo(aCx, aBuffer);
const JS::ArrayBufferOrView& bufOrView = std::get<0>(info);
size_t offset = std::get<1>(info);
size_t len = std::get<2>(info);
if (NS_WARN_IF(!bufOrView)) {
return Err(NS_ERROR_UNEXPECTED);
}
JS::Rooted<JSObject*> obj(aCx, bufOrView.asObject());
// Copy `len` bytes starting at `offset` out of the source buffer.
JS::Rooted<JSObject*> cloned(aCx,
JS::ArrayBufferClone(aCx, obj, offset, len));
if (NS_WARN_IF(!cloned)) {
return Err(NS_ERROR_OUT_OF_MEMORY);
}
JS::Rooted<JS::Value> value(aCx, JS::ObjectValue(*cloned));
OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer clonedBufOrView;
if (NS_WARN_IF(!clonedBufOrView.Init(aCx, value))) {
return Err(NS_ERROR_UNEXPECTED);
}
return clonedBufOrView;
}
/*
* The following are utilities to convert between VideoColorSpace values to
* gfx's values.
*/
// Maps the WebCodecs boolean "full range" flag onto gfx's ColorRange enum.
gfx::ColorRange ToColorRange(bool aIsFullRange) {
  if (aIsFullRange) {
    return gfx::ColorRange::FULL;
  }
  return gfx::ColorRange::LIMITED;
}
// Translates a WebCodecs VideoMatrixCoefficients value into gfx's
// YUVColorSpace. Bt709 and Bt470bg intentionally share the same gfx value;
// unhandled/guard values assert and fall back to Default.
gfx::YUVColorSpace ToColorSpace(VideoMatrixCoefficients aMatrix) {
  if (aMatrix == VideoMatrixCoefficients::Rgb) {
    return gfx::YUVColorSpace::Identity;
  }
  if (aMatrix == VideoMatrixCoefficients::Bt709 ||
      aMatrix == VideoMatrixCoefficients::Bt470bg) {
    return gfx::YUVColorSpace::BT709;
  }
  if (aMatrix == VideoMatrixCoefficients::Smpte170m) {
    return gfx::YUVColorSpace::BT601;
  }
  if (aMatrix == VideoMatrixCoefficients::Bt2020_ncl) {
    return gfx::YUVColorSpace::BT2020;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoMatrixCoefficients");
  return gfx::YUVColorSpace::Default;
}
// Translates a WebCodecs VideoTransferCharacteristics value into gfx's
// TransferFunction. Bt709 and Smpte170m intentionally share the same gfx
// value; Linear and the guard value have no gfx equivalent and fall back to
// Default after asserting.
gfx::TransferFunction ToTransferFunction(
    VideoTransferCharacteristics aTransfer) {
  if (aTransfer == VideoTransferCharacteristics::Bt709 ||
      aTransfer == VideoTransferCharacteristics::Smpte170m) {
    return gfx::TransferFunction::BT709;
  }
  if (aTransfer == VideoTransferCharacteristics::Iec61966_2_1) {
    return gfx::TransferFunction::SRGB;
  }
  if (aTransfer == VideoTransferCharacteristics::Pq) {
    return gfx::TransferFunction::PQ;
  }
  if (aTransfer == VideoTransferCharacteristics::Hlg) {
    return gfx::TransferFunction::HLG;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoTransferCharacteristics");
  return gfx::TransferFunction::Default;
}
// Translates a WebCodecs VideoColorPrimaries value into gfx's ColorSpace2.
// The guard value asserts and falls back to UNKNOWN.
gfx::ColorSpace2 ToPrimaries(VideoColorPrimaries aPrimaries) {
  switch (aPrimaries) {
    case VideoColorPrimaries::Bt709:
      return gfx::ColorSpace2::BT709;
    case VideoColorPrimaries::Bt470bg:
      return gfx::ColorSpace2::BT601_625;
    case VideoColorPrimaries::Smpte170m:
      return gfx::ColorSpace2::BT601_525;
    case VideoColorPrimaries::Bt2020:
      return gfx::ColorSpace2::BT2020;
    case VideoColorPrimaries::Smpte432:
      return gfx::ColorSpace2::DISPLAY_P3;
    case VideoColorPrimaries::EndGuard_:
      break;
  }
  // Fixed copy-paste in the assertion message: it previously named
  // VideoTransferCharacteristics although this function converts
  // VideoColorPrimaries.
  MOZ_ASSERT_UNREACHABLE("unsupported VideoColorPrimaries");
  return gfx::ColorSpace2::UNKNOWN;
}
// Inverse of ToColorRange: true iff the gfx color range is FULL.
bool ToFullRange(const gfx::ColorRange& aColorRange) {
  if (aColorRange == gfx::ColorRange::FULL) {
    return true;
  }
  return false;
}
// Inverse of ToColorSpace: maps a gfx::YUVColorSpace back onto a WebCodecs
// VideoMatrixCoefficients value. Unknown values assert and yield Nothing().
Maybe<VideoMatrixCoefficients> ToMatrixCoefficients(
    const gfx::YUVColorSpace& aColorSpace) {
  if (aColorSpace == gfx::YUVColorSpace::BT601) {
    return Some(VideoMatrixCoefficients::Smpte170m);
  }
  if (aColorSpace == gfx::YUVColorSpace::BT709) {
    return Some(VideoMatrixCoefficients::Bt709);
  }
  if (aColorSpace == gfx::YUVColorSpace::BT2020) {
    return Some(VideoMatrixCoefficients::Bt2020_ncl);
  }
  if (aColorSpace == gfx::YUVColorSpace::Identity) {
    return Some(VideoMatrixCoefficients::Rgb);
  }
  MOZ_ASSERT_UNREACHABLE("unsupported gfx::YUVColorSpace");
  return Nothing();
}
// Inverse of ToTransferFunction: maps a gfx::TransferFunction back onto a
// WebCodecs VideoTransferCharacteristics value. Unknown values assert and
// yield Nothing().
Maybe<VideoTransferCharacteristics> ToTransferCharacteristics(
    const gfx::TransferFunction& aTransferFunction) {
  if (aTransferFunction == gfx::TransferFunction::BT709) {
    return Some(VideoTransferCharacteristics::Bt709);
  }
  if (aTransferFunction == gfx::TransferFunction::SRGB) {
    return Some(VideoTransferCharacteristics::Iec61966_2_1);
  }
  if (aTransferFunction == gfx::TransferFunction::PQ) {
    return Some(VideoTransferCharacteristics::Pq);
  }
  if (aTransferFunction == gfx::TransferFunction::HLG) {
    return Some(VideoTransferCharacteristics::Hlg);
  }
  MOZ_ASSERT_UNREACHABLE("unsupported gfx::TransferFunction");
  return Nothing();
}
// Inverse of ToPrimaries(VideoColorPrimaries): maps a gfx::ColorSpace2 back
// onto a WebCodecs VideoColorPrimaries value. UNKNOWN and SRGB deliberately
// map to Nothing(); other unknown values assert and yield Nothing().
Maybe<VideoColorPrimaries> ToPrimaries(const gfx::ColorSpace2& aColorSpace) {
  if (aColorSpace == gfx::ColorSpace2::UNKNOWN ||
      aColorSpace == gfx::ColorSpace2::SRGB) {
    return Nothing();
  }
  if (aColorSpace == gfx::ColorSpace2::DISPLAY_P3) {
    return Some(VideoColorPrimaries::Smpte432);
  }
  if (aColorSpace == gfx::ColorSpace2::BT601_525) {
    return Some(VideoColorPrimaries::Smpte170m);
  }
  if (aColorSpace == gfx::ColorSpace2::BT709) {
    return Some(VideoColorPrimaries::Bt709);
  }
  if (aColorSpace == gfx::ColorSpace2::BT2020) {
    return Some(VideoColorPrimaries::Bt2020);
  }
  MOZ_ASSERT_UNREACHABLE("unsupported gfx::ColorSpace2");
  return Nothing();
}
/*
* The following are utilities to convert from gfx's formats to
* VideoPixelFormats.
*/
// Maps a gfx::SurfaceFormat onto its WebCodecs VideoPixelFormat
// counterpart, including the planar YUV surface formats. Surface formats
// without an equivalent yield Nothing().
Maybe<VideoPixelFormat> SurfaceFormatToVideoPixelFormat(
    gfx::SurfaceFormat aFormat) {
  if (aFormat == gfx::SurfaceFormat::B8G8R8A8) {
    return Some(VideoPixelFormat::BGRA);
  }
  if (aFormat == gfx::SurfaceFormat::B8G8R8X8) {
    return Some(VideoPixelFormat::BGRX);
  }
  if (aFormat == gfx::SurfaceFormat::R8G8B8A8) {
    return Some(VideoPixelFormat::RGBA);
  }
  if (aFormat == gfx::SurfaceFormat::R8G8B8X8) {
    return Some(VideoPixelFormat::RGBX);
  }
  if (aFormat == gfx::SurfaceFormat::YUV) {
    return Some(VideoPixelFormat::I420);
  }
  if (aFormat == gfx::SurfaceFormat::NV12) {
    return Some(VideoPixelFormat::NV12);
  }
  if (aFormat == gfx::SurfaceFormat::YUV422) {
    return Some(VideoPixelFormat::I422);
  }
  return Nothing();
}
// Maps an ImageBitmapFormat onto its WebCodecs VideoPixelFormat
// counterpart. Formats without an equivalent yield Nothing().
Maybe<VideoPixelFormat> ImageBitmapFormatToVideoPixelFormat(
    ImageBitmapFormat aFormat) {
  if (aFormat == ImageBitmapFormat::RGBA32) {
    return Some(VideoPixelFormat::RGBA);
  }
  if (aFormat == ImageBitmapFormat::BGRA32) {
    return Some(VideoPixelFormat::BGRA);
  }
  if (aFormat == ImageBitmapFormat::YUV444P) {
    return Some(VideoPixelFormat::I444);
  }
  if (aFormat == ImageBitmapFormat::YUV422P) {
    return Some(VideoPixelFormat::I422);
  }
  if (aFormat == ImageBitmapFormat::YUV420P) {
    return Some(VideoPixelFormat::I420);
  }
  if (aFormat == ImageBitmapFormat::YUV420SP_NV12) {
    return Some(VideoPixelFormat::NV12);
  }
  return Nothing();
}
} // namespace mozilla::dom

Просмотреть файл

@ -10,29 +10,11 @@
#include <tuple>
#include "ErrorList.h"
#include "js/TypeDecls.h"
#include "mozilla/Maybe.h"
#include "mozilla/Result.h"
#include "mozilla/Span.h"
#include "mozilla/dom/UnionTypes.h"
namespace mozilla {
namespace gfx {
enum class ColorRange : uint8_t;
enum class ColorSpace2 : uint8_t;
enum class SurfaceFormat : int8_t;
enum class TransferFunction : uint8_t;
enum class YUVColorSpace : uint8_t;
} // namespace gfx
namespace dom {
/*
* The followings are helpers for VideoDecoder methods
*/
nsTArray<nsCString> GuessContainers(const nsString& aCodec);
namespace mozilla::dom {
/*
* Below are helpers to operate ArrayBuffer or ArrayBufferView.
@ -47,54 +29,6 @@ Result<Span<uint8_t>, nsresult> GetSharedArrayBufferData(
Result<Span<uint8_t>, nsresult> GetSharedArrayBufferData(
const OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer& aBuffer);
Result<OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer, nsresult>
CloneBuffer(
JSContext* aCx,
const OwningMaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer& aBuffer);
/*
* The following are utilities to convert between VideoColorSpace values to
* gfx's values.
*/
enum class VideoColorPrimaries : uint8_t;
enum class VideoMatrixCoefficients : uint8_t;
enum class VideoTransferCharacteristics : uint8_t;
gfx::ColorRange ToColorRange(bool aIsFullRange);
gfx::YUVColorSpace ToColorSpace(VideoMatrixCoefficients aMatrix);
gfx::TransferFunction ToTransferFunction(
VideoTransferCharacteristics aTransfer);
gfx::ColorSpace2 ToPrimaries(VideoColorPrimaries aPrimaries);
bool ToFullRange(const gfx::ColorRange& aColorRange);
Maybe<VideoMatrixCoefficients> ToMatrixCoefficients(
const gfx::YUVColorSpace& aColorSpace);
Maybe<VideoTransferCharacteristics> ToTransferCharacteristics(
const gfx::TransferFunction& aTransferFunction);
Maybe<VideoColorPrimaries> ToPrimaries(const gfx::ColorSpace2& aColorSpace);
/*
* The following are utilities to convert from gfx's formats to
* VideoPixelFormats.
*/
enum class ImageBitmapFormat : uint8_t;
enum class VideoPixelFormat : uint8_t;
Maybe<VideoPixelFormat> SurfaceFormatToVideoPixelFormat(
gfx::SurfaceFormat aFormat);
Maybe<VideoPixelFormat> ImageBitmapFormatToVideoPixelFormat(
ImageBitmapFormat aFormat);
} // namespace dom
} // namespace mozilla
} // namespace mozilla::dom
#endif // mozilla_webcodecs_Utils

Просмотреть файл

@ -6,35 +6,18 @@
MOCHITEST_MANIFESTS += ["test/mochitest.ini"]
# For mozilla/layers/ImageBridgeChild.h
LOCAL_INCLUDES += [
"!/ipc/ipdl/_ipdlheaders",
"/ipc/chromium/src/",
]
EXPORTS.mozilla += [
"DecoderAgent.h",
]
EXPORTS.mozilla.dom += [
"EncodedVideoChunk.h",
"VideoColorSpace.h",
"VideoDecoder.h",
"VideoFrame.h",
"WebCodecsUtils.h",
]
UNIFIED_SOURCES += [
"DecoderAgent.cpp",
"EncodedVideoChunk.cpp",
"VideoColorSpace.cpp",
"VideoDecoder.cpp",
"VideoFrame.cpp",
"WebCodecsUtils.cpp",
]
if CONFIG["MOZ_WAYLAND"]:
CXXFLAGS += CONFIG["MOZ_WAYLAND_CFLAGS"]
CFLAGS += CONFIG["MOZ_WAYLAND_CFLAGS"]
FINAL_LIBRARY = "xul"

Просмотреть файл

@ -1397,8 +1397,6 @@ let interfaceNamesInGlobalScope = [
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoColorSpace", insecureContext: true, nightly: true },
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoDecoder", nightly: true },
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoFrame", insecureContext: true, nightly: true },
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoPlaybackQuality", insecureContext: true },

Просмотреть файл

@ -1,71 +0,0 @@
/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/.
*
* The origin of this IDL file is
* https://w3c.github.io/webcodecs/#videodecoder
*/
// VideoDecoder decodes EncodedVideoChunk objects into VideoFrame outputs,
// per https://w3c.github.io/webcodecs/#videodecoder.
[Exposed=(Window,DedicatedWorker), SecureContext, Pref="dom.media.webcodecs.enabled"]
interface VideoDecoder : EventTarget {
[Throws]
constructor(VideoDecoderInit init);
// Current codec state: "unconfigured", "configured", or "closed".
readonly attribute CodecState state;
// Number of pending decode requests not yet passed to the codec.
readonly attribute unsigned long decodeQueueSize;
attribute EventHandler ondequeue;
[Throws]
undefined configure(VideoDecoderConfig config);
[Throws]
undefined decode(EncodedVideoChunk chunk);
// Resolves once all pending outputs have been emitted.
[NewObject, Throws]
Promise<undefined> flush();
[Throws]
undefined reset();
[Throws]
undefined close();
[NewObject, Throws]
static Promise<VideoDecoderSupport> isConfigSupported(VideoDecoderConfig config);
};
dictionary VideoDecoderInit {
// Callback invoked for each decoded VideoFrame.
required VideoFrameOutputCallback output;
// Callback invoked when the decoder encounters a fatal error.
required WebCodecsErrorCallback error;
};
callback VideoFrameOutputCallback = undefined(VideoFrame output);
dictionary VideoDecoderSupport {
boolean supported;
VideoDecoderConfig config;
};
dictionary VideoDecoderConfig {
required DOMString codec;
// Bug 1696216: Should be `[AllowShared] BufferSource description;`.
([AllowShared] ArrayBufferView or [AllowShared] ArrayBuffer) description;
[EnforceRange] unsigned long codedWidth;
[EnforceRange] unsigned long codedHeight;
[EnforceRange] unsigned long displayAspectWidth;
[EnforceRange] unsigned long displayAspectHeight;
VideoColorSpaceInit colorSpace;
HardwareAcceleration hardwareAcceleration = "no-preference";
boolean optimizeForLatency;
};
enum HardwareAcceleration {
"no-preference",
"prefer-hardware",
"prefer-software",
};
enum CodecState {
"unconfigured",
"configured",
"closed"
};
callback WebCodecsErrorCallback = undefined(DOMException error);

Просмотреть файл

@ -977,7 +977,6 @@ WEBIDL_FILES = [
"URLSearchParams.webidl",
"ValidityState.webidl",
"VideoColorSpace.webidl",
"VideoDecoder.webidl",
"VideoFrame.webidl",
"VideoPlaybackQuality.webidl",
"VideoTrack.webidl",

Просмотреть файл

@ -348,8 +348,6 @@ let interfaceNamesInGlobalScope = [
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoColorSpace", insecureContext: true, nightly: true },
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoDecoder", nightly: true },
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "VideoFrame", insecureContext: true, nightly: true },
// IMPORTANT: Do not change this list without review from a DOM peer!
{ name: "WebGL2RenderingContext", insecureContext: true },

Просмотреть файл

@ -45,6 +45,51 @@ prefs: [dom.media.webcodecs.enabled:true]
[AudioDecoder interface: operation isConfigSupported(AudioDecoderConfig)]
expected: FAIL
[VideoDecoder interface: existence and properties of interface object]
expected: FAIL
[VideoDecoder interface object length]
expected: FAIL
[VideoDecoder interface object name]
expected: FAIL
[VideoDecoder interface: existence and properties of interface prototype object]
expected: FAIL
[VideoDecoder interface: existence and properties of interface prototype object's "constructor" property]
expected: FAIL
[VideoDecoder interface: existence and properties of interface prototype object's @@unscopables property]
expected: FAIL
[VideoDecoder interface: attribute state]
expected: FAIL
[VideoDecoder interface: attribute decodeQueueSize]
expected: FAIL
[VideoDecoder interface: attribute ondequeue]
expected: FAIL
[VideoDecoder interface: operation configure(VideoDecoderConfig)]
expected: FAIL
[VideoDecoder interface: operation decode(EncodedVideoChunk)]
expected: FAIL
[VideoDecoder interface: operation flush()]
expected: FAIL
[VideoDecoder interface: operation reset()]
expected: FAIL
[VideoDecoder interface: operation close()]
expected: FAIL
[VideoDecoder interface: operation isConfigSupported(VideoDecoderConfig)]
expected: FAIL
[AudioEncoder interface: existence and properties of interface object]
expected: FAIL
@ -374,6 +419,51 @@ prefs: [dom.media.webcodecs.enabled:true]
[AudioDecoder interface: operation isConfigSupported(AudioDecoderConfig)]
expected: FAIL
[VideoDecoder interface: existence and properties of interface object]
expected: FAIL
[VideoDecoder interface object length]
expected: FAIL
[VideoDecoder interface object name]
expected: FAIL
[VideoDecoder interface: existence and properties of interface prototype object]
expected: FAIL
[VideoDecoder interface: existence and properties of interface prototype object's "constructor" property]
expected: FAIL
[VideoDecoder interface: existence and properties of interface prototype object's @@unscopables property]
expected: FAIL
[VideoDecoder interface: attribute state]
expected: FAIL
[VideoDecoder interface: attribute decodeQueueSize]
expected: FAIL
[VideoDecoder interface: attribute ondequeue]
expected: FAIL
[VideoDecoder interface: operation configure(VideoDecoderConfig)]
expected: FAIL
[VideoDecoder interface: operation decode(EncodedVideoChunk)]
expected: FAIL
[VideoDecoder interface: operation flush()]
expected: FAIL
[VideoDecoder interface: operation reset()]
expected: FAIL
[VideoDecoder interface: operation close()]
expected: FAIL
[VideoDecoder interface: operation isConfigSupported(VideoDecoderConfig)]
expected: FAIL
[AudioEncoder interface: existence and properties of interface object]
expected: FAIL

Просмотреть файл

@ -1,30 +1,16 @@
[video-decoder.crossOriginIsolated.https.any.html]
prefs: [dom.media.webcodecs.enabled:true]
expected:
if (os == "android") and fission: [OK, TIMEOUT]
[Test isConfigSupported() and configure() using a SharedArrayBuffer]
expected:
if (os != "linux"): [PRECONDITION_FAILED]
PASS
expected: FAIL
[Test isConfigSupported() and configure() using a Uint8Array(SharedArrayBuffer)]
expected:
if (os != "linux"): [PRECONDITION_FAILED]
PASS
expected: FAIL
[video-decoder.crossOriginIsolated.https.any.worker.html]
prefs: [dom.media.webcodecs.enabled:true]
expected:
if (os == "android") and debug and not swgl: [OK, TIMEOUT]
[Test isConfigSupported() and configure() using a SharedArrayBuffer]
expected:
if (os != "linux"): [PRECONDITION_FAILED]
PASS
expected: FAIL
[Test isConfigSupported() and configure() using a Uint8Array(SharedArrayBuffer)]
expected:
if (os != "linux"): [PRECONDITION_FAILED]
PASS
expected: FAIL

Просмотреть файл

@ -1,10 +1,72 @@
[video-decoder.https.any.html]
prefs: [dom.media.webcodecs.enabled:true]
expected:
if (os == "android") and fission: [OK, TIMEOUT]
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Empty codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Unrecognized codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Audio codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Ambiguous codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Codec with MIME type]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Empty codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Unrecognized codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Audio codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Ambiguous codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Codec with MIME type]
expected: FAIL
[Test VideoDecoder construction]
expected: FAIL
[video-decoder.https.any.worker.html]
prefs: [dom.media.webcodecs.enabled:true]
expected:
if (os == "android") and fission: [OK, TIMEOUT]
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Empty codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Unrecognized codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Audio codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Ambiguous codec]
expected: FAIL
[Test that VideoDecoder.isConfigSupported() rejects invalid config:Codec with MIME type]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Empty codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Unrecognized codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Audio codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Ambiguous codec]
expected: FAIL
[Test that VideoDecoder.configure() rejects invalid config:Codec with MIME type]
expected: FAIL
[Test VideoDecoder construction]
expected: FAIL

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -363,10 +363,7 @@ promise_test(async t => {
const callbacks = {};
let errors = 0;
let gotError = new Promise(resolve => callbacks.error = e => {
errors++;
resolve(e);
});
callbacks.error = e => errors++;
callbacks.output = frame => { frame.close(); };
const decoder = createVideoDecoder(t, callbacks);
@ -375,16 +372,9 @@ promise_test(async t => {
decoder.decode(new EncodedVideoChunk(
{type: 'key', timestamp: 1, data: new ArrayBuffer(0)}));
await promise_rejects_dom(t, "EncodingError",
decoder.flush().catch((e) => {
assert_equals(errors, 0);
throw e;
})
);
await promise_rejects_dom(t, 'AbortError', decoder.flush());
let e = await gotError;
assert_true(e instanceof DOMException);
assert_equals(e.name, 'EncodingError');
assert_equals(errors, 1, 'errors');
assert_equals(decoder.state, 'closed', 'state');
}, 'Decode empty frame');
@ -394,10 +384,7 @@ promise_test(async t => {
const callbacks = {};
let errors = 0;
let gotError = new Promise(resolve => callbacks.error = e => {
errors++;
resolve(e);
});
callbacks.error = e => errors++;
let outputs = 0;
callbacks.output = frame => {
@ -410,17 +397,10 @@ promise_test(async t => {
decoder.decode(CHUNKS[0]); // Decode keyframe first.
decoder.decode(createCorruptChunk(2));
await promise_rejects_dom(t, "EncodingError",
decoder.flush().catch((e) => {
assert_equals(errors, 0);
throw e;
})
);
await promise_rejects_dom(t, 'AbortError', decoder.flush());
assert_less_than_equal(outputs, 1);
let e = await gotError;
assert_true(e instanceof DOMException);
assert_equals(e.name, 'EncodingError');
assert_equals(errors, 1, 'errors');
assert_equals(decoder.state, 'closed', 'state');
}, 'Decode corrupt frame');

Просмотреть файл

@ -2429,8 +2429,6 @@ STATIC_ATOMS = [
Atom("onupdateend", "onupdateend"),
Atom("onaddsourcebuffer", "onaddsourcebuffer"),
Atom("onremovesourcebuffer", "onremovesourcebuffer"),
# WebCodecs
Atom("ondequeue", "ondequeue"),
# RDF (not used by mozilla-central, but still used by comm-central)
Atom("about", "about"),
Atom("ID", "ID"),