Bug 1608577: Part 1 - Remove |using namespace mozilla::java| from dom/media; r=kinetik

Differential Revision: https://phabricator.services.mozilla.com/D59796

--HG--
extra : moz-landing-system : lando
Aaron Klotz 2020-01-13 22:04:02 +00:00
Parent 2fe4803a51
Commit c4f1b7b2d0
7 changed files: 123 additions and 121 deletions

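The change is mechanical across all seven files: the file-scope using-directive for the generated JNI bindings is removed and every affected type is qualified explicitly at the point of use (java::MediaDrmProxy, java::sdk::MediaFormat, and so on); no wrapper is renamed, only the qualification changes. A minimal, self-contained sketch of the pattern follows — the stubbed MediaDrmProxy is an invented stand-in for the real auto-generated wrapper, shown only so the example compiles on its own:

#include <cstdio>

namespace mozilla {
namespace java {
// Invented stub standing in for the auto-generated JNI wrapper class;
// present only to make this sketch self-contained.
struct MediaDrmProxy {
  static bool IsCryptoSchemeSupported() { return true; }
};
}  // namespace java

// Before this patch, a file-scope `using namespace mozilla::java;` let the
// call below be written as MediaDrmProxy::IsCryptoSchemeSupported().
// After the patch, the using-directive is gone and the call site carries the
// explicit java:: prefix, exactly as in the hunks below.
static bool CheckWidevineScheme() {
  return java::MediaDrmProxy::IsCryptoSchemeSupported();
}
}  // namespace mozilla

int main() { return mozilla::CheckWidevineScheme() ? 0 : 1; }
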
View file

@@ -304,7 +304,6 @@ static nsTArray<KeySystemConfig> GetSupportedKeySystems() {
widevine.mEncryptionSchemes.AppendElement(NS_LITERAL_STRING("cbcs"));
#if defined(MOZ_WIDGET_ANDROID)
using namespace mozilla::java;
// MediaDrm.isCryptoSchemeSupported only allows passing
// "video/mp4" or "video/webm" for mimetype string.
// See
@@ -318,29 +317,29 @@ static nsTArray<KeySystemConfig> GetSupportedKeySystems() {
} DataForValidation;
DataForValidation validationList[] = {
{nsCString(VIDEO_MP4), EME_CODEC_H264, MediaDrmProxy::AVC,
{nsCString(VIDEO_MP4), EME_CODEC_H264, java::MediaDrmProxy::AVC,
&widevine.mMP4},
{nsCString(VIDEO_MP4), EME_CODEC_VP9, MediaDrmProxy::AVC,
{nsCString(VIDEO_MP4), EME_CODEC_VP9, java::MediaDrmProxy::AVC,
&widevine.mMP4},
{nsCString(AUDIO_MP4), EME_CODEC_AAC, MediaDrmProxy::AAC,
{nsCString(AUDIO_MP4), EME_CODEC_AAC, java::MediaDrmProxy::AAC,
&widevine.mMP4},
{nsCString(AUDIO_MP4), EME_CODEC_FLAC, MediaDrmProxy::FLAC,
{nsCString(AUDIO_MP4), EME_CODEC_FLAC, java::MediaDrmProxy::FLAC,
&widevine.mMP4},
{nsCString(AUDIO_MP4), EME_CODEC_OPUS, MediaDrmProxy::OPUS,
{nsCString(AUDIO_MP4), EME_CODEC_OPUS, java::MediaDrmProxy::OPUS,
&widevine.mMP4},
{nsCString(VIDEO_WEBM), EME_CODEC_VP8, MediaDrmProxy::VP8,
{nsCString(VIDEO_WEBM), EME_CODEC_VP8, java::MediaDrmProxy::VP8,
&widevine.mWebM},
{nsCString(VIDEO_WEBM), EME_CODEC_VP9, MediaDrmProxy::VP9,
{nsCString(VIDEO_WEBM), EME_CODEC_VP9, java::MediaDrmProxy::VP9,
&widevine.mWebM},
{nsCString(AUDIO_WEBM), EME_CODEC_VORBIS, MediaDrmProxy::VORBIS,
{nsCString(AUDIO_WEBM), EME_CODEC_VORBIS, java::MediaDrmProxy::VORBIS,
&widevine.mWebM},
{nsCString(AUDIO_WEBM), EME_CODEC_OPUS, MediaDrmProxy::OPUS,
{nsCString(AUDIO_WEBM), EME_CODEC_OPUS, java::MediaDrmProxy::OPUS,
&widevine.mWebM},
};
for (const auto& data : validationList) {
if (MediaDrmProxy::IsCryptoSchemeSupported(EME_KEY_SYSTEM_WIDEVINE,
data.mMimeType)) {
if (java::MediaDrmProxy::IsCryptoSchemeSupported(
EME_KEY_SYSTEM_WIDEVINE, data.mMimeType)) {
if (AndroidDecoderModule::SupportsMimeType(data.mMimeType)) {
data.mSupportType->SetCanDecryptAndDecode(data.mEMECodecType);
} else {

View file

@@ -20,8 +20,6 @@
#include "MediaCodec.h"
#include "nsString.h"
using namespace mozilla::java;
namespace mozilla {
class MediaDrmCDMCallbackProxy;

View file

@@ -9,8 +9,6 @@
#include "GeneratedJNINatives.h"
#include "MediaCodec.h" // For MediaDrm::KeyStatus
using namespace mozilla::java;
namespace mozilla {
LogModule* GetMDRMNLog() {
@@ -19,10 +17,10 @@ LogModule* GetMDRMNLog() {
}
class MediaDrmJavaCallbacksSupport
: public MediaDrmProxy::NativeMediaDrmProxyCallbacks::Natives<
: public java::MediaDrmProxy::NativeMediaDrmProxyCallbacks::Natives<
MediaDrmJavaCallbacksSupport> {
public:
typedef MediaDrmProxy::NativeMediaDrmProxyCallbacks::Natives<
typedef java::MediaDrmProxy::NativeMediaDrmProxyCallbacks::Natives<
MediaDrmJavaCallbacksSupport>
MediaDrmProxyNativeCallbacks;
using MediaDrmProxyNativeCallbacks::AttachNative;
@@ -191,9 +189,9 @@ void MediaDrmJavaCallbacksSupport::OnRejectPromise(
MediaDrmProxySupport::MediaDrmProxySupport(const nsAString& aKeySystem)
: mKeySystem(aKeySystem), mDestroyed(false) {
mJavaCallbacks = MediaDrmProxy::NativeMediaDrmProxyCallbacks::New();
mJavaCallbacks = java::MediaDrmProxy::NativeMediaDrmProxyCallbacks::New();
mBridgeProxy = MediaDrmProxy::Create(mKeySystem, mJavaCallbacks);
mBridgeProxy = java::MediaDrmProxy::Create(mKeySystem, mJavaCallbacks);
MOZ_ASSERT(mBridgeProxy, "mBridgeProxy should not be null");
mMediaDrmStubId = mBridgeProxy->GetStubId()->ToString();

View file

@@ -20,16 +20,14 @@
#include "nsThreadUtils.h"
#include "mozilla/StaticPrefs_media.h"
using namespace mozilla::java;
namespace mozilla {
class HLSResourceCallbacksSupport
: public GeckoHLSResourceWrapper::Callbacks::Natives<
: public java::GeckoHLSResourceWrapper::Callbacks::Natives<
HLSResourceCallbacksSupport> {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(HLSResourceCallbacksSupport)
public:
typedef GeckoHLSResourceWrapper::Callbacks::Natives<
typedef java::GeckoHLSResourceWrapper::Callbacks::Natives<
HLSResourceCallbacksSupport>
NativeCallbacks;
using NativeCallbacks::AttachNative;
@@ -151,7 +149,7 @@ nsresult HLSDecoder::Load(nsIChannel* aChannel) {
Unused << mURI->GetSpec(spec);
;
HLSResourceCallbacksSupport::Init();
mJavaCallbacks = GeckoHLSResourceWrapper::Callbacks::New();
mJavaCallbacks = java::GeckoHLSResourceWrapper::Callbacks::New();
mCallbackSupport = new HLSResourceCallbacksSupport(this);
HLSResourceCallbacksSupport::AttachNative(mJavaCallbacks, mCallbackSupport);
mHLSResourceWrapper = java::GeckoHLSResourceWrapper::Create(

View file

@@ -15,8 +15,6 @@
#include "mozilla/Unused.h"
#include "nsPrintfCString.h"
using namespace mozilla::java;
namespace mozilla {
static Atomic<uint32_t> sStreamSourceID(0u);
@@ -61,11 +59,12 @@ static mozilla::StereoMode getStereoMode(int aMode) {
// We ensure the callback will never be invoked after
// HLSDemuxerCallbacksSupport::DisposeNative has been called in ~HLSDemuxer.
class HLSDemuxer::HLSDemuxerCallbacksSupport
: public GeckoHLSDemuxerWrapper::Callbacks::Natives<
: public java::GeckoHLSDemuxerWrapper::Callbacks::Natives<
HLSDemuxerCallbacksSupport> {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(HLSDemuxerCallbacksSupport)
public:
typedef GeckoHLSDemuxerWrapper::Callbacks::Natives<HLSDemuxerCallbacksSupport>
typedef java::GeckoHLSDemuxerWrapper::Callbacks::Natives<
HLSDemuxerCallbacksSupport>
NativeCallbacks;
using NativeCallbacks::AttachNative;
using NativeCallbacks::DisposeNative;
@@ -129,14 +128,14 @@ HLSDemuxer::HLSDemuxer(int aPlayerId)
/* aSupportsTailDispatch = */ false)) {
MOZ_ASSERT(NS_IsMainThread());
HLSDemuxerCallbacksSupport::Init();
mJavaCallbacks = GeckoHLSDemuxerWrapper::Callbacks::New();
mJavaCallbacks = java::GeckoHLSDemuxerWrapper::Callbacks::New();
MOZ_ASSERT(mJavaCallbacks);
mCallbackSupport = new HLSDemuxerCallbacksSupport(this);
HLSDemuxerCallbacksSupport::AttachNative(mJavaCallbacks, mCallbackSupport);
mHLSDemuxerWrapper =
GeckoHLSDemuxerWrapper::Create(aPlayerId, mJavaCallbacks);
java::GeckoHLSDemuxerWrapper::Create(aPlayerId, mJavaCallbacks);
MOZ_ASSERT(mHLSDemuxerWrapper);
}

View file

@@ -16,9 +16,6 @@
#include "libyuv.h"
using namespace mozilla::java;
using namespace mozilla::java::sdk;
namespace mozilla {
using media::TimeUnit;
@@ -53,7 +50,7 @@ static const char* MimeTypeOf(MediaDataEncoder::CodecType aCodec) {
}
}
using FormatResult = Result<MediaFormat::LocalRef, MediaResult>;
using FormatResult = Result<java::sdk::MediaFormat::LocalRef, MediaResult>;
FormatResult ToMediaFormat(const AndroidDataEncoder::Config& aConfig) {
if (!aConfig.mCodecSpecific) {
@@ -63,29 +60,32 @@ FormatResult ToMediaFormat(const AndroidDataEncoder::Config& aConfig) {
}
nsresult rv = NS_OK;
MediaFormat::LocalRef format;
rv = MediaFormat::CreateVideoFormat(MimeTypeOf(aConfig.mCodecType),
aConfig.mSize.width, aConfig.mSize.height,
&format);
java::sdk::MediaFormat::LocalRef format;
rv = java::sdk::MediaFormat::CreateVideoFormat(MimeTypeOf(aConfig.mCodecType),
aConfig.mSize.width,
aConfig.mSize.height, &format);
NS_ENSURE_SUCCESS(
rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"fail to create Java MediaFormat object")));
rv = format->SetInteger(MediaFormat::KEY_BITRATE_MODE, 2 /* CBR */);
rv =
format->SetInteger(java::sdk::MediaFormat::KEY_BITRATE_MODE, 2 /* CBR */);
NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"fail to set bitrate mode")));
rv = format->SetInteger(MediaFormat::KEY_BIT_RATE, aConfig.mBitsPerSec);
rv = format->SetInteger(java::sdk::MediaFormat::KEY_BIT_RATE,
aConfig.mBitsPerSec);
NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"fail to set bitrate")));
// COLOR_FormatYUV420SemiPlanar(NV12) is the most widely supported
// format.
rv = format->SetInteger(MediaFormat::KEY_COLOR_FORMAT, 0x15);
rv = format->SetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT, 0x15);
NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"fail to set color format")));
rv = format->SetInteger(MediaFormat::KEY_FRAME_RATE, aConfig.mFramerate);
rv = format->SetInteger(java::sdk::MediaFormat::KEY_FRAME_RATE,
aConfig.mFramerate);
NS_ENSURE_SUCCESS(rv, FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"fail to set frame rate")));
@@ -94,7 +94,8 @@ FormatResult ToMediaFormat(const AndroidDataEncoder::Config& aConfig) {
// containing all key frames is requested.
int32_t intervalInSec = std::max<size_t>(
1, aConfig.mCodecSpecific.value().mKeyframeInterval / aConfig.mFramerate);
rv = format->SetInteger(MediaFormat::KEY_I_FRAME_INTERVAL, intervalInSec);
rv = format->SetInteger(java::sdk::MediaFormat::KEY_I_FRAME_INTERVAL,
intervalInSec);
NS_ENSURE_SUCCESS(rv,
FormatResult(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"fail to set I-frame interval")));
@@ -106,8 +107,8 @@ RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::ProcessInit() {
AssertOnTaskQueue();
MOZ_ASSERT(!mJavaEncoder);
BufferInfo::LocalRef bufferInfo;
if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
java::sdk::BufferInfo::LocalRef bufferInfo;
if (NS_FAILED(java::sdk::BufferInfo::New(&bufferInfo)) || !bufferInfo) {
return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
}
mInputBufferInfo = bufferInfo;
@@ -121,7 +122,7 @@ RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::ProcessInit() {
// Register native methods.
JavaCallbacksSupport::Init();
mJavaCallbacks = CodecProxy::NativeCallbacks::New();
mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
if (!mJavaCallbacks) {
return InitPromise::CreateAndReject(
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
@@ -131,8 +132,8 @@ RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::ProcessInit() {
JavaCallbacksSupport::AttachNative(
mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));
mJavaEncoder = CodecProxy::Create(true /* encoder */, mFormat, nullptr,
mJavaCallbacks, EmptyString());
mJavaEncoder = java::CodecProxy::Create(true /* encoder */, mFormat, nullptr,
mJavaCallbacks, EmptyString());
if (!mJavaEncoder) {
return InitPromise::CreateAndReject(
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
@@ -200,7 +201,7 @@ RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessEncode(
if (aSample->mKeyframe) {
mInputBufferInfo->Set(0, mYUVBuffer->Length(),
aSample->mTime.ToMicroseconds(),
MediaCodec::BUFFER_FLAG_SYNC_FRAME);
java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME);
} else {
mInputBufferInfo->Set(0, mYUVBuffer->Length(),
aSample->mTime.ToMicroseconds(), 0);
@@ -219,20 +220,19 @@ RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessEncode(
class AutoRelease final {
public:
AutoRelease(CodecProxy::Param aEncoder, Sample::Param aSample)
AutoRelease(java::CodecProxy::Param aEncoder, java::Sample::Param aSample)
: mEncoder(aEncoder), mSample(aSample) {}
~AutoRelease() { mEncoder->ReleaseOutput(mSample, false); }
private:
CodecProxy::GlobalRef mEncoder;
Sample::GlobalRef mSample;
java::CodecProxy::GlobalRef mEncoder;
java::Sample::GlobalRef mSample;
};
static RefPtr<MediaByteBuffer> ExtractCodecConfig(SampleBuffer::Param aBuffer,
const int32_t aOffset,
const int32_t aSize,
const bool aAsAnnexB) {
static RefPtr<MediaByteBuffer> ExtractCodecConfig(
java::SampleBuffer::Param aBuffer, const int32_t aOffset,
const int32_t aSize, const bool aAsAnnexB) {
auto annexB = MakeRefPtr<MediaByteBuffer>(aSize);
annexB->SetLength(aSize);
jni::ByteBuffer::LocalRef dest =
@@ -257,11 +257,13 @@ static RefPtr<MediaByteBuffer> ExtractCodecConfig(SampleBuffer::Param aBuffer,
return avcc;
}
void AndroidDataEncoder::ProcessOutput(Sample::GlobalRef&& aSample,
SampleBuffer::GlobalRef&& aBuffer) {
void AndroidDataEncoder::ProcessOutput(
java::Sample::GlobalRef&& aSample,
java::SampleBuffer::GlobalRef&& aBuffer) {
if (!mTaskQueue->IsCurrentThreadIn()) {
nsresult rv = mTaskQueue->Dispatch(
NewRunnableMethod<Sample::GlobalRef&&, SampleBuffer::GlobalRef&&>(
nsresult rv =
mTaskQueue->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&,
java::SampleBuffer::GlobalRef&&>(
"AndroidDataEncoder::ProcessOutput", this,
&AndroidDataEncoder::ProcessOutput, std::move(aSample),
std::move(aBuffer)));
@@ -277,12 +279,12 @@ void AndroidDataEncoder::ProcessOutput(Sample::GlobalRef&& aSample,
AutoRelease releaseSample(mJavaEncoder, aSample);
BufferInfo::LocalRef info = aSample->Info();
java::sdk::BufferInfo::LocalRef info = aSample->Info();
MOZ_ASSERT(info);
int32_t flags;
bool ok = NS_SUCCEEDED(info->Flags(&flags));
bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
int32_t offset;
ok &= NS_SUCCEEDED(info->Offset(&offset));
@@ -298,13 +300,14 @@ void AndroidDataEncoder::ProcessOutput(Sample::GlobalRef&& aSample,
}
if (size > 0) {
if ((flags & MediaCodec::BUFFER_FLAG_CODEC_CONFIG) != 0) {
if ((flags & java::sdk::MediaCodec::BUFFER_FLAG_CODEC_CONFIG) != 0) {
mConfigData = ExtractCodecConfig(aBuffer, offset, size,
mConfig.mUsage == Usage::Realtime);
return;
}
RefPtr<MediaRawData> output = GetOutputData(
aBuffer, offset, size, !!(flags & MediaCodec::BUFFER_FLAG_KEY_FRAME));
RefPtr<MediaRawData> output =
GetOutputData(aBuffer, offset, size,
!!(flags & java::sdk::MediaCodec::BUFFER_FLAG_KEY_FRAME));
output->mEOS = isEOS;
output->mTime = media::TimeUnit::FromMicroseconds(presentationTimeUs);
mEncodedData.AppendElement(std::move(output));
@@ -321,8 +324,8 @@ void AndroidDataEncoder::ProcessOutput(Sample::GlobalRef&& aSample,
}
RefPtr<MediaRawData> AndroidDataEncoder::GetOutputData(
SampleBuffer::Param aBuffer, const int32_t aOffset, const int32_t aSize,
const bool aIsKeyFrame) {
java::SampleBuffer::Param aBuffer, const int32_t aOffset,
const int32_t aSize, const bool aIsKeyFrame) {
auto output = MakeRefPtr<MediaRawData>();
size_t prependSize = 0;
@@ -374,7 +377,8 @@ RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessDrain() {
switch (mDrainState) {
case DrainState::DRAINABLE:
mInputBufferInfo->Set(0, 0, -1, MediaCodec::BUFFER_FLAG_END_OF_STREAM);
mInputBufferInfo->Set(0, 0, -1,
java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
mJavaEncoder->Input(nullptr, mInputBufferInfo, nullptr);
mDrainState = DrainState::DRAINING;
[[fallthrough]];
@@ -445,12 +449,12 @@ void AndroidDataEncoder::CallbacksSupport::HandleInput(int64_t aTimestamp,
bool aProcessed) {}
void AndroidDataEncoder::CallbacksSupport::HandleOutput(
Sample::Param aSample, SampleBuffer::Param aBuffer) {
java::Sample::Param aSample, java::SampleBuffer::Param aBuffer) {
mEncoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
}
void AndroidDataEncoder::CallbacksSupport::HandleOutputFormatChanged(
MediaFormat::Param aFormat) {}
java::sdk::MediaFormat::Param aFormat) {}
void AndroidDataEncoder::CallbacksSupport::HandleError(
const MediaResult& aError) {

View file

@@ -28,8 +28,6 @@
using namespace mozilla;
using namespace mozilla::gl;
using namespace mozilla::java;
using namespace mozilla::java::sdk;
using media::TimeUnit;
namespace mozilla {
@@ -38,7 +36,8 @@ namespace mozilla {
// the Java codec (for rendering or not).
class RenderOrReleaseOutput {
public:
RenderOrReleaseOutput(CodecProxy::Param aCodec, Sample::Param aSample)
RenderOrReleaseOutput(java::CodecProxy::Param aCodec,
java::Sample::Param aSample)
: mCodec(aCodec), mSample(aSample) {}
virtual ~RenderOrReleaseOutput() { ReleaseOutput(false); }
@@ -53,8 +52,8 @@ class RenderOrReleaseOutput {
}
private:
CodecProxy::GlobalRef mCodec;
Sample::GlobalRef mSample;
java::CodecProxy::GlobalRef mCodec;
java::Sample::GlobalRef mSample;
};
class RemoteVideoDecoder : public RemoteDataDecoder {
@@ -65,7 +64,8 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
: private RenderOrReleaseOutput,
public layers::SurfaceTextureImage::SetCurrentCallback {
public:
CompositeListener(CodecProxy::Param aCodec, Sample::Param aSample)
CompositeListener(java::CodecProxy::Param aCodec,
java::Sample::Param aSample)
: RenderOrReleaseOutput(aCodec, aSample) {}
void operator()(void) override { ReleaseOutput(true); }
@@ -95,7 +95,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
}
void HandleOutput(Sample::Param aSample,
void HandleOutput(java::Sample::Param aSample,
java::SampleBuffer::Param aBuffer) override {
MOZ_ASSERT(!aBuffer, "Video sample should be bufferless");
// aSample will be implicitly converted into a GlobalRef.
@@ -112,7 +112,8 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
RemoteVideoDecoder* mDecoder;
};
RemoteVideoDecoder(const VideoInfo& aConfig, MediaFormat::Param aFormat,
RemoteVideoDecoder(const VideoInfo& aConfig,
java::sdk::MediaFormat::Param aFormat,
const nsString& aDrmStubId, TaskQueue* aTaskQueue)
: RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
aFormat, aDrmStubId, aTaskQueue),
@@ -120,19 +121,20 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
~RemoteVideoDecoder() {
if (mSurface) {
SurfaceAllocator::DisposeSurface(mSurface);
java::SurfaceAllocator::DisposeSurface(mSurface);
}
}
RefPtr<InitPromise> Init() override {
BufferInfo::LocalRef bufferInfo;
if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
java::sdk::BufferInfo::LocalRef bufferInfo;
if (NS_FAILED(java::sdk::BufferInfo::New(&bufferInfo)) || !bufferInfo) {
return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
}
mInputBufferInfo = bufferInfo;
mSurface = GeckoSurface::LocalRef(SurfaceAllocator::AcquireSurface(
mConfig.mImage.width, mConfig.mImage.height, false));
mSurface =
java::GeckoSurface::LocalRef(java::SurfaceAllocator::AcquireSurface(
mConfig.mImage.width, mConfig.mImage.height, false));
if (!mSurface) {
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
__func__);
@@ -143,7 +145,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
// Register native methods.
JavaCallbacksSupport::Init();
mJavaCallbacks = CodecProxy::NativeCallbacks::New();
mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
if (!mJavaCallbacks) {
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
__func__);
@@ -151,7 +153,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
JavaCallbacksSupport::AttachNative(
mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));
mJavaDecoder = CodecProxy::Create(
mJavaDecoder = java::CodecProxy::Create(
false, // false indicates to create a decoder and true denotes encoder
mFormat, mSurface, mJavaCallbacks, mDrmStubId);
if (mJavaDecoder == nullptr) {
@@ -239,11 +241,12 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
// Param and LocalRef are only valid for the duration of a JNI method call.
// Use GlobalRef as the parameter type to keep the Java object referenced
// until running.
void ProcessOutput(Sample::GlobalRef&& aSample) {
void ProcessOutput(java::Sample::GlobalRef&& aSample) {
if (!mTaskQueue->IsCurrentThreadIn()) {
nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<Sample::GlobalRef&&>(
"RemoteVideoDecoder::ProcessOutput", this,
&RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
nsresult rv =
mTaskQueue->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&>(
"RemoteVideoDecoder::ProcessOutput", this,
&RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
return;
@@ -258,7 +261,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
UniquePtr<layers::SurfaceTextureImage::SetCurrentCallback> releaseSample(
new CompositeListener(mJavaDecoder, aSample));
BufferInfo::LocalRef info = aSample->Info();
java::sdk::BufferInfo::LocalRef info = aSample->Info();
MOZ_ASSERT(info);
int32_t flags;
@@ -281,7 +284,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
InputInfo inputInfo;
ok = mInputInfos.Find(presentationTimeUs, inputInfo);
bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
if (!ok && !isEOS) {
// Ignore output with no corresponding input.
return;
@@ -298,7 +301,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
inputInfo.mDisplaySize, offset,
TimeUnit::FromMicroseconds(presentationTimeUs),
TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img.forget(),
!!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
!!(flags & java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME),
TimeUnit::FromMicroseconds(presentationTimeUs));
RemoteDataDecoder::UpdateOutputStatus(std::move(v));
@@ -310,7 +313,7 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
}
const VideoInfo mConfig;
GeckoSurface::GlobalRef mSurface;
java::GeckoSurface::GlobalRef mSurface;
AndroidSurfaceTextureHandle mSurfaceHandle;
// Only accessed on reader's task queue.
bool mIsCodecSupportAdaptivePlayback = false;
@@ -326,7 +329,8 @@ class RemoteVideoDecoder : public RemoteDataDecoder {
class RemoteAudioDecoder : public RemoteDataDecoder {
public:
RemoteAudioDecoder(const AudioInfo& aConfig, MediaFormat::Param aFormat,
RemoteAudioDecoder(const AudioInfo& aConfig,
java::sdk::MediaFormat::Param aFormat,
const nsString& aDrmStubId, TaskQueue* aTaskQueue)
: RemoteDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType,
aFormat, aDrmStubId, aTaskQueue) {
@@ -346,8 +350,8 @@ class RemoteAudioDecoder : public RemoteDataDecoder {
}
RefPtr<InitPromise> Init() override {
BufferInfo::LocalRef bufferInfo;
if (NS_FAILED(BufferInfo::New(&bufferInfo)) || !bufferInfo) {
java::sdk::BufferInfo::LocalRef bufferInfo;
if (NS_FAILED(java::sdk::BufferInfo::New(&bufferInfo)) || !bufferInfo) {
return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
}
mInputBufferInfo = bufferInfo;
@@ -355,7 +359,7 @@ class RemoteAudioDecoder : public RemoteDataDecoder {
// Register native methods.
JavaCallbacksSupport::Init();
mJavaCallbacks = CodecProxy::NativeCallbacks::New();
mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
if (!mJavaCallbacks) {
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
__func__);
@@ -363,8 +367,8 @@ class RemoteAudioDecoder : public RemoteDataDecoder {
JavaCallbacksSupport::AttachNative(
mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));
mJavaDecoder =
CodecProxy::Create(false, mFormat, nullptr, mJavaCallbacks, mDrmStubId);
mJavaDecoder = java::CodecProxy::Create(false, mFormat, nullptr,
mJavaCallbacks, mDrmStubId);
if (mJavaDecoder == nullptr) {
return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
__func__);
@@ -403,14 +407,15 @@ class RemoteAudioDecoder : public RemoteDataDecoder {
mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
}
void HandleOutput(Sample::Param aSample,
void HandleOutput(java::Sample::Param aSample,
java::SampleBuffer::Param aBuffer) override {
MOZ_ASSERT(aBuffer, "Audio sample should have buffer");
// aSample will be implicitly converted into a GlobalRef.
mDecoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
}
void HandleOutputFormatChanged(MediaFormat::Param aFormat) override {
void HandleOutputFormatChanged(
java::sdk::MediaFormat::Param aFormat) override {
int32_t outputChannels = 0;
aFormat->GetInteger(NS_LITERAL_STRING("channel-count"), &outputChannels);
AudioConfig::ChannelLayout layout(outputChannels);
@@ -456,11 +461,12 @@ class RemoteAudioDecoder : public RemoteDataDecoder {
// Param and LocalRef are only valid for the duration of a JNI method call.
// Use GlobalRef as the parameter type to keep the Java object referenced
// until running.
void ProcessOutput(Sample::GlobalRef&& aSample,
SampleBuffer::GlobalRef&& aBuffer) {
void ProcessOutput(java::Sample::GlobalRef&& aSample,
java::SampleBuffer::GlobalRef&& aBuffer) {
if (!mTaskQueue->IsCurrentThreadIn()) {
nsresult rv = mTaskQueue->Dispatch(
NewRunnableMethod<Sample::GlobalRef&&, SampleBuffer::GlobalRef&&>(
NewRunnableMethod<java::Sample::GlobalRef&&,
java::SampleBuffer::GlobalRef&&>(
"RemoteAudioDecoder::ProcessOutput", this,
&RemoteAudioDecoder::ProcessOutput, std::move(aSample),
std::move(aBuffer)));
@@ -478,12 +484,12 @@ class RemoteAudioDecoder : public RemoteDataDecoder {
RenderOrReleaseOutput autoRelease(mJavaDecoder, aSample);
BufferInfo::LocalRef info = aSample->Info();
java::sdk::BufferInfo::LocalRef info = aSample->Info();
MOZ_ASSERT(info);
int32_t flags = 0;
bool ok = NS_SUCCEEDED(info->Flags(&flags));
bool isEOS = !!(flags & MediaCodec::BUFFER_FLAG_END_OF_STREAM);
bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
int32_t offset;
ok &= NS_SUCCEEDED(info->Offset(&offset));
@@ -555,10 +561,10 @@ already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateAudioDecoder(
const CreateDecoderParams& aParams, const nsString& aDrmStubId,
CDMProxy* aProxy) {
const AudioInfo& config = aParams.AudioConfig();
MediaFormat::LocalRef format;
java::sdk::MediaFormat::LocalRef format;
NS_ENSURE_SUCCESS(
MediaFormat::CreateAudioFormat(config.mMimeType, config.mRate,
config.mChannels, &format),
java::sdk::MediaFormat::CreateAudioFormat(config.mMimeType, config.mRate,
config.mChannels, &format),
nullptr);
RefPtr<MediaDataDecoder> decoder =
@@ -573,8 +579,8 @@ already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
const CreateDecoderParams& aParams, const nsString& aDrmStubId,
CDMProxy* aProxy) {
const VideoInfo& config = aParams.VideoConfig();
MediaFormat::LocalRef format;
NS_ENSURE_SUCCESS(MediaFormat::CreateVideoFormat(
java::sdk::MediaFormat::LocalRef format;
NS_ENSURE_SUCCESS(java::sdk::MediaFormat::CreateVideoFormat(
TranslateMimeType(config.mMimeType),
config.mImage.width, config.mImage.height, &format),
nullptr);
@@ -589,7 +595,7 @@ already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
RemoteDataDecoder::RemoteDataDecoder(MediaData::Type aType,
const nsACString& aMimeType,
MediaFormat::Param aFormat,
java::sdk::MediaFormat::Param aFormat,
const nsString& aDrmStubId,
TaskQueue* aTaskQueue)
: mType(aType),
@@ -640,8 +646,8 @@ RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Drain() {
}
SetState(State::DRAINING);
self->mInputBufferInfo->Set(0, 0, -1,
MediaCodec::BUFFER_FLAG_END_OF_STREAM);
self->mInputBufferInfo->Set(
0, 0, -1, java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
mSession = mJavaDecoder->Input(nullptr, self->mInputBufferInfo, nullptr);
return p;
});
@@ -673,7 +679,7 @@ RefPtr<ShutdownPromise> RemoteDataDecoder::ProcessShutdown() {
return ShutdownPromise::CreateAndResolve(true, __func__);
}
static CryptoInfo::LocalRef GetCryptoInfoFromSample(
static java::sdk::CryptoInfo::LocalRef GetCryptoInfoFromSample(
const MediaRawData* aSample) {
auto& cryptoObj = aSample->mCrypto;
@@ -681,8 +687,8 @@ static CryptoInfo::LocalRef GetCryptoInfoFromSample(
return nullptr;
}
CryptoInfo::LocalRef cryptoInfo;
nsresult rv = CryptoInfo::New(&cryptoInfo);
java::sdk::CryptoInfo::LocalRef cryptoInfo;
nsresult rv = java::sdk::CryptoInfo::New(&cryptoInfo);
NS_ENSURE_SUCCESS(rv, nullptr);
uint32_t numSubSamples = std::min<uint32_t>(
@@ -701,8 +707,8 @@ static CryptoInfo::LocalRef GetCryptoInfoFromSample(
}
uint32_t codecSpecificDataSize = aSample->Size() - totalSubSamplesSize;
// Size of codec specific data("CSD") for Android MediaCodec usage should be
// included in the 1st plain size.
// Size of codec specific data("CSD") for Android java::sdk::MediaCodec usage
// should be included in the 1st plain size.
plainSizes[0] += codecSpecificDataSize;
static const int kExpectedIVLength = 16;
@@ -726,7 +732,7 @@ static CryptoInfo::LocalRef GetCryptoInfoFromSample(
reinterpret_cast<const int8_t*>(&cryptoObj.mKeyId[0]),
cryptoObj.mKeyId.Length());
cryptoInfo->Set(numSubSamples, numBytesOfPlainData, numBytesOfEncryptedData,
keyId, iv, MediaCodec::CRYPTO_MODE_AES_CTR);
keyId, iv, java::sdk::MediaCodec::CRYPTO_MODE_AES_CTR);
return cryptoInfo;
}