Mirror of https://github.com/mozilla/gecko-dev.git
Backed out 12 changesets (bug 1665776) for causing mda failures. CLOSED TREE
Backed out changeset 1093b571cd9e (bug 1665776)
Backed out changeset 97401a43356f (bug 1665776)
Backed out changeset e4d1fab7ed91 (bug 1665776)
Backed out changeset eef564e4e8ce (bug 1665776)
Backed out changeset b436ced5a9c2 (bug 1665776)
Backed out changeset 231057bbccc6 (bug 1665776)
Backed out changeset 918d100709b3 (bug 1665776)
Backed out changeset 63a6fb712a7c (bug 1665776)
Backed out changeset 2f9e721a4adb (bug 1665776)
Backed out changeset f65349f581ac (bug 1665776)
Backed out changeset eb289c096758 (bug 1665776)
Backed out changeset 077b96856a67 (bug 1665776)
This commit is contained in:
Parent: 48b857316e
Commit: 84969e5caa
@@ -14,17 +14,16 @@
#include "PEMFactory.h"
#include "TimeUnits.h"
#include "VideoUtils.h"
#include "VPXDecoder.h"
#include <algorithm>

#include <fstream>

#define RUN_IF_SUPPORTED(mimeType, test) \
do { \
RefPtr<PEMFactory> f(new PEMFactory()); \
if (f->SupportsMimeType(nsLiteralCString(mimeType))) { \
test(); \
} \
} while (0)
#define SKIP_IF_NOT_SUPPORTED(mimeType) \
do { \
RefPtr<PEMFactory> f(new PEMFactory()); \
if (!f->SupportsMimeType(nsLiteralCString(mimeType))) { \
return; \
} \
} while (0)

#define BLOCK_SIZE 64
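[Note: a minimal sketch, not part of this patch, of how the two macro styles above differ in a gtest body. RUN_IF_SUPPORTED (removed by this backout) wraps the assertions in a lambda and silently skips them when the MIME type is unsupported; the restored SKIP_IF_NOT_SUPPORTED early-returns from the test function itself. The test names here are hypothetical.]

  TEST_F(MediaDataEncoderTest, SketchPreBackout) {
    // Body runs only when "video/mp4" encoding is supported.
    RUN_IF_SUPPORTED(VIDEO_MP4, []() { /* assertions */ });
  }

  TEST_F(MediaDataEncoderTest, SketchPostBackout) {
    // Bail out of the whole test early when unsupported.
    SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);
    // assertions
  }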
@@ -35,8 +34,6 @@
#define FRAME_DURATION (1000000 / FRAME_RATE)
#define BIT_RATE (1000 * 1000) // 1Mbps
#define KEYFRAME_INTERVAL FRAME_RATE // 1 keyframe per second
#define VIDEO_VP8 "video/vp8"
#define VIDEO_VP9 "video/vp9"

using namespace mozilla;

@@ -138,38 +135,6 @@ class MediaDataEncoderTest : public testing::Test {
FrameSource mData;
};

template <typename T>
already_AddRefed<MediaDataEncoder> CreateVideoEncoder(
const char* aMimeType, MediaDataEncoder::Usage aUsage,
MediaDataEncoder::PixelFormat aPixelFormat, int32_t aWidth, int32_t aHeight,
const Maybe<T>& aSpecific) {
RefPtr<PEMFactory> f(new PEMFactory());

if (!f->SupportsMimeType(nsCString(aMimeType))) {
return nullptr;
}

VideoInfo videoInfo(aWidth, aHeight);
videoInfo.mMimeType = nsCString(aMimeType);
const RefPtr<TaskQueue> taskQueue(
new TaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER)));

RefPtr<MediaDataEncoder> e;
if (aSpecific) {
e = f->CreateEncoder(CreateEncoderParams(
videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
FRAME_RATE /* FPS */, KEYFRAME_INTERVAL /* keyframe interval */,
BIT_RATE /* bitrate */, aSpecific.value()));
} else {
e = f->CreateEncoder(CreateEncoderParams(
videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
FRAME_RATE /* FPS */, KEYFRAME_INTERVAL /* keyframe interval */,
BIT_RATE /* bitrate */));
}

return e.forget();
}

static already_AddRefed<MediaDataEncoder> CreateH264Encoder(
MediaDataEncoder::Usage aUsage = MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat aPixelFormat =

@@ -177,9 +142,31 @@ static already_AddRefed<MediaDataEncoder> CreateH264Encoder(
int32_t aWidth = WIDTH, int32_t aHeight = HEIGHT,
const Maybe<MediaDataEncoder::H264Specific>& aSpecific =
Some(MediaDataEncoder::H264Specific(
KEYFRAME_INTERVAL,
MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel))) {
return CreateVideoEncoder(VIDEO_MP4, aUsage, aPixelFormat, aWidth, aHeight,
aSpecific);
RefPtr<PEMFactory> f(new PEMFactory());

if (!f->SupportsMimeType(nsLiteralCString(VIDEO_MP4))) {
return nullptr;
}

VideoInfo videoInfo(aWidth, aHeight);
videoInfo.mMimeType = nsLiteralCString(VIDEO_MP4);
const RefPtr<TaskQueue> taskQueue(
new TaskQueue(GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER)));

RefPtr<MediaDataEncoder> e;
if (aSpecific) {
e = f->CreateEncoder(CreateEncoderParams(
videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
FRAME_RATE /* FPS */, BIT_RATE /* bitrate */, aSpecific.value()));
} else {
e = f->CreateEncoder(CreateEncoderParams(
videoInfo /* track info */, aUsage, taskQueue, aPixelFormat,
FRAME_RATE /* FPS */, BIT_RATE /* bitrate */));
}

return e.forget();
}

void WaitForShutdown(RefPtr<MediaDataEncoder> aEncoder) {

@@ -202,11 +189,13 @@ void WaitForShutdown(RefPtr<MediaDataEncoder> aEncoder) {
}

TEST_F(MediaDataEncoderTest, H264Create) {
RUN_IF_SUPPORTED(VIDEO_MP4, []() {
RefPtr<MediaDataEncoder> e = CreateH264Encoder();
EXPECT_TRUE(e);
WaitForShutdown(e);
});
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

RefPtr<MediaDataEncoder> e = CreateH264Encoder();

EXPECT_TRUE(e);

WaitForShutdown(e);
}

static bool EnsureInit(RefPtr<MediaDataEncoder> aEncoder) {

@@ -225,20 +214,30 @@ static bool EnsureInit(RefPtr<MediaDataEncoder> aEncoder) {
return succeeded;
}

TEST_F(MediaDataEncoderTest, H264Inits) {
RUN_IF_SUPPORTED(VIDEO_MP4, []() {
// w/o codec specific.
RefPtr<MediaDataEncoder> e = CreateH264Encoder(
MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, WIDTH, HEIGHT, Nothing());
EXPECT_TRUE(EnsureInit(e));
WaitForShutdown(e);
TEST_F(MediaDataEncoderTest, H264InitWithoutSpecific) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

// w/ codec specific
e = CreateH264Encoder();
EXPECT_TRUE(EnsureInit(e));
WaitForShutdown(e);
});
RefPtr<MediaDataEncoder> e = CreateH264Encoder(
MediaDataEncoder::Usage::Realtime, MediaDataEncoder::PixelFormat::YUV420P,
WIDTH, HEIGHT, Nothing());

#if defined(MOZ_WIDGET_ANDROID) // Android encoder requires I-frame interval
EXPECT_FALSE(EnsureInit(e));
#else
EXPECT_TRUE(EnsureInit(e));
#endif

WaitForShutdown(e);
}

TEST_F(MediaDataEncoderTest, H264Init) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

RefPtr<MediaDataEncoder> e = CreateH264Encoder();

EXPECT_TRUE(EnsureInit(e));

WaitForShutdown(e);
}

static MediaDataEncoder::EncodedData Encode(

@@ -281,207 +280,83 @@ static MediaDataEncoder::EncodedData Encode(
return output;
}

TEST_F(MediaDataEncoderTest, H264Encodes) {
RUN_IF_SUPPORTED(VIDEO_MP4, [this]() {
// Encode one frame and output in AnnexB format.
RefPtr<MediaDataEncoder> e = CreateH264Encoder();
EnsureInit(e);
MediaDataEncoder::EncodedData output = Encode(e, 1UL, mData);
EXPECT_EQ(output.Length(), 1UL);
EXPECT_TRUE(AnnexB::IsAnnexB(output[0]));
WaitForShutdown(e);
TEST_F(MediaDataEncoderTest, H264EncodeOneFrameAsAnnexB) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

// Encode multiple frames and output in AnnexB format.
e = CreateH264Encoder();
EnsureInit(e);
output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
for (auto frame : output) {
EXPECT_TRUE(AnnexB::IsAnnexB(frame));
}
WaitForShutdown(e);
RefPtr<MediaDataEncoder> e = CreateH264Encoder();
EnsureInit(e);

// Encode one frame and output in avcC format.
e = CreateH264Encoder(MediaDataEncoder::Usage::Record);
EnsureInit(e);
output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
AnnexB::IsAVCC(output[0]); // Only 1st frame has extra data.
for (auto frame : output) {
EXPECT_FALSE(AnnexB::IsAnnexB(frame));
}
WaitForShutdown(e);
});
MediaDataEncoder::EncodedData output = Encode(e, 1UL, mData);
EXPECT_EQ(output.Length(), 1UL);
EXPECT_TRUE(AnnexB::IsAnnexB(output[0]));

WaitForShutdown(e);
}

TEST_F(MediaDataEncoderTest, EncodeMultipleFramesAsAnnexB) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

RefPtr<MediaDataEncoder> e = CreateH264Encoder();
EnsureInit(e);

MediaDataEncoder::EncodedData output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
for (auto frame : output) {
EXPECT_TRUE(AnnexB::IsAnnexB(frame));
}

WaitForShutdown(e);
}

TEST_F(MediaDataEncoderTest, EncodeMultipleFramesAsAVCC) {
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

RefPtr<MediaDataEncoder> e =
CreateH264Encoder(MediaDataEncoder::Usage::Record);
EnsureInit(e);

MediaDataEncoder::EncodedData output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
AnnexB::IsAVCC(output[0]); // Only 1st frame has extra data.
for (auto frame : output) {
EXPECT_FALSE(AnnexB::IsAnnexB(frame));
}

WaitForShutdown(e);
}

#ifndef DEBUG // Zero width or height will assert/crash in debug builds.
TEST_F(MediaDataEncoderTest, InvalidSize) {
RUN_IF_SUPPORTED(VIDEO_MP4, []() {
RefPtr<MediaDataEncoder> e0x0 =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 0, 0);
EXPECT_NE(e0x0, nullptr);
EXPECT_FALSE(EnsureInit(e0x0));
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

RefPtr<MediaDataEncoder> e0x1 =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 0, 1);
EXPECT_NE(e0x1, nullptr);
EXPECT_FALSE(EnsureInit(e0x1));
RefPtr<MediaDataEncoder> e0x0 =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 0, 0);
EXPECT_NE(e0x0, nullptr);
EXPECT_FALSE(EnsureInit(e0x0));

RefPtr<MediaDataEncoder> e1x0 =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 1, 0);
EXPECT_NE(e1x0, nullptr);
EXPECT_FALSE(EnsureInit(e1x0));
});
RefPtr<MediaDataEncoder> e0x1 =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 0, 1);
EXPECT_NE(e0x1, nullptr);
EXPECT_FALSE(EnsureInit(e0x1));

RefPtr<MediaDataEncoder> e1x0 =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 1, 0);
EXPECT_NE(e1x0, nullptr);
EXPECT_FALSE(EnsureInit(e1x0));
}
#endif

#ifdef MOZ_WIDGET_ANDROID
TEST_F(MediaDataEncoderTest, AndroidNotSupportedSize) {
RUN_IF_SUPPORTED(VIDEO_MP4, []() {
RefPtr<MediaDataEncoder> e =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 1, 1);
EXPECT_NE(e, nullptr);
EXPECT_FALSE(EnsureInit(e));
});
SKIP_IF_NOT_SUPPORTED(VIDEO_MP4);

RefPtr<MediaDataEncoder> e =
CreateH264Encoder(MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, 1, 1);
EXPECT_NE(e, nullptr);
EXPECT_FALSE(EnsureInit(e));
}
#endif

static already_AddRefed<MediaDataEncoder> CreateVP8Encoder(
MediaDataEncoder::Usage aUsage = MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat aPixelFormat =
MediaDataEncoder::PixelFormat::YUV420P,
int32_t aWidth = WIDTH, int32_t aHeight = HEIGHT,
const Maybe<MediaDataEncoder::VPXSpecific::VP8>& aSpecific =
Some(MediaDataEncoder::VPXSpecific::VP8())) {
return CreateVideoEncoder(VIDEO_VP8, aUsage, aPixelFormat, aWidth, aHeight,
aSpecific);
}

static already_AddRefed<MediaDataEncoder> CreateVP9Encoder(
MediaDataEncoder::Usage aUsage = MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat aPixelFormat =
MediaDataEncoder::PixelFormat::YUV420P,
int32_t aWidth = WIDTH, int32_t aHeight = HEIGHT,
const Maybe<MediaDataEncoder::VPXSpecific::VP9>& aSpecific =
Some(MediaDataEncoder::VPXSpecific::VP9())) {
return CreateVideoEncoder(VIDEO_VP9, aUsage, aPixelFormat, aWidth, aHeight,
aSpecific);
}

TEST_F(MediaDataEncoderTest, VP8Create) {
RUN_IF_SUPPORTED(VIDEO_VP8, []() {
RefPtr<MediaDataEncoder> e = CreateVP8Encoder();
EXPECT_TRUE(e);
WaitForShutdown(e);
});
}

TEST_F(MediaDataEncoderTest, VP8Inits) {
RUN_IF_SUPPORTED(VIDEO_VP8, []() {
// w/o codec specific.
RefPtr<MediaDataEncoder> e = CreateVP8Encoder(
MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, WIDTH, HEIGHT, Nothing());
EXPECT_TRUE(EnsureInit(e));
WaitForShutdown(e);

// w/ codec specific
e = CreateVP8Encoder();
EXPECT_TRUE(EnsureInit(e));
WaitForShutdown(e);
});
}

TEST_F(MediaDataEncoderTest, VP8Encodes) {
RUN_IF_SUPPORTED(VIDEO_VP8, [this]() {
// Encode one VPX frame.
RefPtr<MediaDataEncoder> e = CreateVP8Encoder();
EnsureInit(e);
MediaDataEncoder::EncodedData output = Encode(e, 1UL, mData);
EXPECT_EQ(output.Length(), 1UL);
VPXDecoder::VPXStreamInfo info;
EXPECT_TRUE(VPXDecoder::GetStreamInfo(*output[0], info,
VPXDecoder::Codec::VP8));
EXPECT_EQ(info.mKeyFrame, output[0]->mKeyframe);
if (info.mKeyFrame) {
EXPECT_EQ(info.mImage, kImageSize);
}
WaitForShutdown(e);

// Encode multiple VPX frames.
e = CreateVP8Encoder();
EnsureInit(e);
output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
for (auto frame : output) {
VPXDecoder::VPXStreamInfo info;
EXPECT_TRUE(VPXDecoder::GetStreamInfo(*frame, info,
VPXDecoder::Codec::VP8));
EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
if (info.mKeyFrame) {
EXPECT_EQ(info.mImage, kImageSize);
}
}
WaitForShutdown(e);
});
}

TEST_F(MediaDataEncoderTest, VP9Create) {
RUN_IF_SUPPORTED(VIDEO_VP9, []() {
RefPtr<MediaDataEncoder> e = CreateVP9Encoder();
EXPECT_TRUE(e);
WaitForShutdown(e);
});
}

TEST_F(MediaDataEncoderTest, VP9Inits) {
RUN_IF_SUPPORTED(VIDEO_VP9, []() {
// w/o codec specific.
RefPtr<MediaDataEncoder> e = CreateVP9Encoder(
MediaDataEncoder::Usage::Realtime,
MediaDataEncoder::PixelFormat::YUV420P, WIDTH, HEIGHT, Nothing());
EXPECT_TRUE(EnsureInit(e));
WaitForShutdown(e);

// w/ codec specific
e = CreateVP9Encoder();
EXPECT_TRUE(EnsureInit(e));
WaitForShutdown(e);
});
}

TEST_F(MediaDataEncoderTest, VP9Encodes) {
RUN_IF_SUPPORTED(VIDEO_VP9, [this]() {
RefPtr<MediaDataEncoder> e = CreateVP9Encoder();
EnsureInit(e);
MediaDataEncoder::EncodedData output = Encode(e, 1UL, mData);
EXPECT_EQ(output.Length(), 1UL);
VPXDecoder::VPXStreamInfo info;
EXPECT_TRUE(VPXDecoder::GetStreamInfo(*output[0], info,
VPXDecoder::Codec::VP9));
EXPECT_EQ(info.mKeyFrame, output[0]->mKeyframe);
if (info.mKeyFrame) {
EXPECT_EQ(info.mImage, kImageSize);
}
WaitForShutdown(e);

e = CreateVP9Encoder();
EnsureInit(e);
output = Encode(e, NUM_FRAMES, mData);
EXPECT_EQ(output.Length(), NUM_FRAMES);
for (auto frame : output) {
VPXDecoder::VPXStreamInfo info;
EXPECT_TRUE(VPXDecoder::GetStreamInfo(*frame, info,
VPXDecoder::Codec::VP9));
EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
if (info.mKeyFrame) {
EXPECT_EQ(info.mImage, kImageSize);
}
}
WaitForShutdown(e);
});
}
@@ -7,7 +7,6 @@
#if !defined(PlatformEncoderModule_h_)
# define PlatformEncoderModule_h_

# include "MP4Decoder.h"
# include "MediaData.h"
# include "MediaInfo.h"
# include "MediaResult.h"

@@ -18,7 +17,6 @@
# include "mozilla/TaskQueue.h"
# include "mozilla/dom/ImageBitmapBinding.h"
# include "nsISupportsImpl.h"
# include "VPXDecoder.h"

namespace mozilla {

@@ -72,10 +70,12 @@ class MediaDataEncoder {
struct H264Specific final {
enum class ProfileLevel { BaselineAutoLevel, MainAutoLevel };

const size_t mKeyframeInterval;
const ProfileLevel mProfileLevel;

explicit H264Specific(const ProfileLevel aProfileLevel)
: mProfileLevel(aProfileLevel) {}
H264Specific(const size_t aKeyframeInterval,
const ProfileLevel aProfileLevel)
: mKeyframeInterval(aKeyframeInterval), mProfileLevel(aProfileLevel) {}
};
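[Note: the hunk above restores mKeyframeInterval to H264Specific, so the keyframe interval travels with the H.264 codec-specific data rather than with the generic video config. A sketch of the restored construction, with an illustrative interval value:]

  MediaDataEncoder::H264Specific specific(
      30 /* keyframe interval in frames, illustrative */,
      MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel);

The one-argument constructor being backed out carried only the profile level and left the interval to the shared video config.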
struct OpusSpecific final {

@@ -90,71 +90,6 @@ class MediaDataEncoder {
}
};

// From webrtc::VideoCodecVP8. mResilience is a boolean value because while
// VP8ResilienceMode has 3 values, kResilientFrames is not supported.
# define VPX_COMMON_SETTINGS \
const Complexity mComplexity; \
const bool mResilience; \
const uint8_t mNumTemporalLayers; \
const bool mDenoising; \
const bool mAutoResize; \
const bool mFrameDropping;

// See webrtc::VideoEncoder::GetDefaultVp(8|9)Settings().
# define VPX_COMMON_DEFAULTS(resize) \
mComplexity(Complexity::Normal), mResilience(true), mNumTemporalLayers(1), \
mDenoising(true), mAutoResize(resize), mFrameDropping(0)
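[Note: expanded, the default VP8() constructor in the block below (removed by this backout) — VP8() : VPX_COMMON_DEFAULTS(false /* auto resize */) {} — is equivalent to the following sketch of the macro expansion:]

  VP8() : mComplexity(Complexity::Normal),
          mResilience(true),
          mNumTemporalLayers(1),
          mDenoising(true),
          mAutoResize(false),
          mFrameDropping(0) {}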
struct VPXSpecific final {
enum class Complexity { Normal, High, Higher, Max };
struct VP8 final {
VPX_COMMON_SETTINGS
// Ignore webrtc::VideoCodecVP8::errorConcealmentOn,
// for it's always false in the codebase (except libwebrtc test cases).

VP8() : VPX_COMMON_DEFAULTS(false /* auto resize */) {}
VP8(const Complexity aComplexity, const bool aResilience,
const uint8_t aNumTemporalLayers, const bool aDenoising,
const bool aAutoResize, const bool aFrameDropping)
: mComplexity(aComplexity),
mResilience(aResilience),
mNumTemporalLayers(aNumTemporalLayers),
mDenoising(aDenoising),
mAutoResize(aAutoResize),
mFrameDropping(aFrameDropping) {}
};

struct VP9 final {
VPX_COMMON_SETTINGS
// From webrtc::VideoCodecVP9.
bool mAdaptiveQp;
uint8_t mNumSpatialLayers;
bool mFlexible;

VP9()
: VPX_COMMON_DEFAULTS(true /* auto resize */),
mAdaptiveQp(true),
mNumSpatialLayers(1),
mFlexible(false) {}
VP9(const Complexity aComplexity, const bool aResilience,
const uint8_t aNumTemporalLayers, const bool aDenoising,
const bool aAutoResize, const bool aFrameDropping,
const bool aAdaptiveQp, const uint8_t aNumSpatialLayers,
const bool aFlexible)
: mComplexity(aComplexity),
mResilience(aResilience),
mNumTemporalLayers(aNumTemporalLayers),
mDenoising(aDenoising),
mAutoResize(aAutoResize),
mFrameDropping(aFrameDropping),
mAdaptiveQp(aAdaptiveQp),
mNumSpatialLayers(aNumSpatialLayers),
mFlexible(aFlexible) {}
};

VPXSpecific() = delete;
};

static bool IsVideo(const CodecType aCodec) {
return aCodec > CodecType::_BeginVideo_ && aCodec < CodecType::_EndVideo_;
}

@@ -245,17 +180,13 @@ class MediaDataEncoder {
const gfx::IntSize mSize;
const PixelFormat mSourcePixelFormat;
const uint8_t mFramerate;
const size_t mKeyframeInterval;

VideoConfig(const CodecType aCodecType, const Usage aUsage,
const gfx::IntSize& aSize, const PixelFormat aSourcePixelFormat,
const uint8_t aFramerate, const size_t aKeyframeInterval,
const Rate aBitrate)
const uint8_t aFramerate, const Rate aBitrate)
: BaseConfig<T>(aCodecType, aUsage, aBitrate),
mSize(aSize),
mSourcePixelFormat(aSourcePixelFormat),
mFramerate(aFramerate),
mKeyframeInterval(aKeyframeInterval) {}
mFramerate(aFramerate) {}
};

template <typename T>

@@ -275,39 +206,30 @@ class MediaDataEncoder {

public:
using H264Config = VideoConfig<H264Specific>;
using VP8Config = VideoConfig<VPXSpecific::VP8>;
using VP9Config = VideoConfig<VPXSpecific::VP9>;
};

struct MOZ_STACK_CLASS CreateEncoderParams final {
union CodecSpecific {
MediaDataEncoder::H264Specific mH264;
MediaDataEncoder::OpusSpecific mOpus;
MediaDataEncoder::VPXSpecific::VP8 mVP8;
MediaDataEncoder::VPXSpecific::VP9 mVP9;

explicit CodecSpecific(const MediaDataEncoder::H264Specific&& aH264)
: mH264(aH264) {}
explicit CodecSpecific(const MediaDataEncoder::OpusSpecific&& aOpus)
: mOpus(aOpus) {}
explicit CodecSpecific(const MediaDataEncoder::VPXSpecific::VP8&& aVP8)
: mVP8(aVP8) {}
explicit CodecSpecific(const MediaDataEncoder::VPXSpecific::VP9&& aVP9)
: mVP9(aVP9) {}
};
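[Note: a sketch of how a caller feeds this tagged union through the post-backout variadic constructor below; names and values are illustrative. The trailing codec-specific argument is emplaced into mCodecSpecific, and ToH264Config() later reads it back as mCodecSpecific.ref().mH264:]

  CreateEncoderParams params(
      videoInfo, MediaDataEncoder::Usage::Realtime, taskQueue,
      MediaDataEncoder::PixelFormat::YUV420P, 30 /* fps */,
      1000000 /* bps */,
      MediaDataEncoder::H264Specific(
          30, MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel));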
CreateEncoderParams(const TrackInfo& aConfig,
const MediaDataEncoder::Usage aUsage,
const RefPtr<TaskQueue> aTaskQueue,
const MediaDataEncoder::PixelFormat aPixelFormat,
const uint8_t aFramerate, const size_t aKeyframeInterval,
const uint8_t aFramerate,
const MediaDataEncoder::Rate aBitrate)
: mConfig(aConfig),
mUsage(aUsage),
mTaskQueue(aTaskQueue),
mPixelFormat(aPixelFormat),
mFramerate(aFramerate),
mKeyframeInterval(aKeyframeInterval),
mBitrate(aBitrate) {
MOZ_ASSERT(mTaskQueue);
}

@@ -317,7 +239,7 @@ struct MOZ_STACK_CLASS CreateEncoderParams final {
const MediaDataEncoder::Usage aUsage,
const RefPtr<TaskQueue> aTaskQueue,
const MediaDataEncoder::PixelFormat aPixelFormat,
const uint8_t aFramerate, const size_t aKeyframeInterval,
const uint8_t aFramerate,
const MediaDataEncoder::Rate aBitrate,
const Ts&&... aCodecSpecific)
: mConfig(aConfig),

@@ -325,15 +247,9 @@ struct MOZ_STACK_CLASS CreateEncoderParams final {
mTaskQueue(aTaskQueue),
mPixelFormat(aPixelFormat),
mFramerate(aFramerate),
mKeyframeInterval(aKeyframeInterval),
mBitrate(aBitrate) {
MOZ_ASSERT(mTaskQueue);
SetCodecSpecific(std::forward<const Ts>(aCodecSpecific)...);
}

template <typename T>
void SetCodecSpecific(const T&& aCodecSpecific) {
mCodecSpecific.emplace(std::forward<const T>(aCodecSpecific));
Set(std::forward<const Ts>(aCodecSpecific)...);
}

const MediaDataEncoder::H264Config ToH264Config() const {

@@ -342,7 +258,7 @@ struct MOZ_STACK_CLASS CreateEncoderParams final {

auto config = MediaDataEncoder::H264Config(
MediaDataEncoder::CodecType::H264, mUsage, info->mImage, mPixelFormat,
mFramerate, mKeyframeInterval, mBitrate);
mFramerate, mBitrate);
if (mCodecSpecific) {
config.SetCodecSpecific(mCodecSpecific.ref().mH264);
}

@@ -350,56 +266,19 @@ struct MOZ_STACK_CLASS CreateEncoderParams final {
return config;
}

const MediaDataEncoder::VP8Config ToVP8Config() const {
const VideoInfo* info = mConfig.GetAsVideoInfo();
MOZ_ASSERT(info);

auto config = MediaDataEncoder::VP8Config(
CodecTypeForMime(info->mMimeType), mUsage, info->mImage, mPixelFormat,
mFramerate, mKeyframeInterval, mBitrate);
if (mCodecSpecific) {
config.SetCodecSpecific(mCodecSpecific.ref().mVP8);
}
return config;
}

const MediaDataEncoder::VP9Config ToVP9Config() const {
const VideoInfo* info = mConfig.GetAsVideoInfo();
MOZ_ASSERT(info);

auto config = MediaDataEncoder::VP9Config(
CodecTypeForMime(info->mMimeType), mUsage, info->mImage, mPixelFormat,
mFramerate, mKeyframeInterval, mBitrate);
if (mCodecSpecific) {
config.SetCodecSpecific(mCodecSpecific.ref().mVP9);
}
return config;
}

static MediaDataEncoder::CodecType CodecTypeForMime(
const nsACString& aMimeType) {
if (MP4Decoder::IsH264(aMimeType)) {
return MediaDataEncoder::CodecType::H264;
} else if (VPXDecoder::IsVPX(aMimeType, VPXDecoder::VP8)) {
return MediaDataEncoder::CodecType::VP8;
} else if (VPXDecoder::IsVPX(aMimeType, VPXDecoder::VP9)) {
return MediaDataEncoder::CodecType::VP9;
} else {
MOZ_ASSERT_UNREACHABLE("Unsupported Mimetype");
return MediaDataEncoder::CodecType::Unknown;
}
}
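[Note: a quick check of the mapping above (removed by this backout); MP4Decoder::IsH264 and VPXDecoder::IsVPX do the MIME matching:]

  // CodecTypeForMime("video/avc"_ns) -> MediaDataEncoder::CodecType::H264
  // CodecTypeForMime("video/vp8"_ns) -> MediaDataEncoder::CodecType::VP8
  // CodecTypeForMime("video/vp9"_ns) -> MediaDataEncoder::CodecType::VP9
  // Any other MIME type asserts unreachable and yields CodecType::Unknown.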
const TrackInfo& mConfig;
const MediaDataEncoder::Usage mUsage;
const RefPtr<TaskQueue> mTaskQueue;
const MediaDataEncoder::PixelFormat mPixelFormat;
const uint8_t mFramerate;
const size_t mKeyframeInterval;
const MediaDataEncoder::Rate mBitrate;
Maybe<CodecSpecific> mCodecSpecific;

private:
template <typename T>
void Set(const T&& aCodecSpecific) {
mCodecSpecific.emplace(std::forward<const T>(aCodecSpecific));
}
};

} // namespace mozilla
@@ -37,25 +37,20 @@ extern LazyLogModule sPEMLog;
} \
} while (0)

template <typename ConfigType>
RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder<ConfigType>::Init() {
RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::Init() {
// Sanity-check the input size, since the Android software encoder fails to
// do it.
if (mConfig.mSize.width == 0 || mConfig.mSize.height == 0) {
return InitPromise::CreateAndReject(NS_ERROR_ILLEGAL_VALUE, __func__);
}

return InvokeAsync(mTaskQueue, this, __func__,
&AndroidDataEncoder<ConfigType>::ProcessInit);
&AndroidDataEncoder::ProcessInit);
}

static const char* MimeTypeOf(MediaDataEncoder::CodecType aCodec) {
switch (aCodec) {
case MediaDataEncoder::CodecType::H264:
return "video/avc";
case MediaDataEncoder::CodecType::VP8:
return "video/x-vnd.on2.vp8";
case MediaDataEncoder::CodecType::VP9:
return "video/x-vnd.on2.vp9";
default:
return "";
}

@@ -63,8 +58,13 @@ static const char* MimeTypeOf(MediaDataEncoder::CodecType aCodec) {

using FormatResult = Result<java::sdk::MediaFormat::LocalRef, MediaResult>;

template <typename ConfigType>
FormatResult ToMediaFormat(const ConfigType& aConfig) {
FormatResult ToMediaFormat(const AndroidDataEncoder::Config& aConfig) {
if (!aConfig.mCodecSpecific) {
return FormatResult(
MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
"Android video encoder requires I-frame interval"));
}

nsresult rv = NS_OK;
java::sdk::MediaFormat::LocalRef format;
rv = java::sdk::MediaFormat::CreateVideoFormat(MimeTypeOf(aConfig.mCodecType),

@@ -98,8 +98,8 @@ FormatResult ToMediaFormat(const ConfigType& aConfig) {
// Ensure interval >= 1. A negative value means no key frames are
// requested after the first frame. A zero value means a stream
// containing all key frames is requested.
int32_t intervalInSec =
std::max<size_t>(1, aConfig.mKeyframeInterval / aConfig.mFramerate);
int32_t intervalInSec = std::max<size_t>(
1, aConfig.mCodecSpecific.value().mKeyframeInterval / aConfig.mFramerate);
rv = format->SetInteger(java::sdk::MediaFormat::KEY_I_FRAME_INTERVAL,
intervalInSec);
NS_ENSURE_SUCCESS(rv,
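[Note: a worked example of the restored computation, using the gtest constants from this patch, where KEYFRAME_INTERVAL == FRAME_RATE (one keyframe per second): intervalInSec = max(1, KEYFRAME_INTERVAL / FRAME_RATE) = max(1, 1) = 1, so MediaCodec is asked for one I-frame per second. The max() also keeps a sub-second request from collapsing to 0, which MediaCodec would interpret as an all-keyframe stream.]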
@@ -109,9 +109,7 @@
return format;
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::InitPromise>
AndroidDataEncoder<ConfigType>::ProcessInit() {
RefPtr<MediaDataEncoder::InitPromise> AndroidDataEncoder::ProcessInit() {
AssertOnTaskQueue();
MOZ_ASSERT(!mJavaEncoder);

@@ -121,7 +119,7 @@ AndroidDataEncoder<ConfigType>::ProcessInit() {
}
mInputBufferInfo = bufferInfo;

FormatResult result = ToMediaFormat<ConfigType>(mConfig);
FormatResult result = ToMediaFormat(mConfig);
if (result.isErr()) {
return InitPromise::CreateAndReject(result.unwrapErr(), __func__);
}

@@ -155,8 +153,7 @@ AndroidDataEncoder<ConfigType>::ProcessInit() {
return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder<ConfigType>::Encode(
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Encode(
const MediaData* aSample) {
RefPtr<AndroidDataEncoder> self = this;
MOZ_ASSERT(aSample != nullptr);

@@ -192,9 +189,8 @@ static jni::ByteBuffer::LocalRef ConvertI420ToNV12Buffer(
return jni::ByteBuffer::New(aYUVBuffer->Elements(), aYUVBuffer->Length());
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise>
AndroidDataEncoder<ConfigType>::ProcessEncode(RefPtr<const MediaData> aSample) {
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessEncode(
RefPtr<const MediaData> aSample) {
AssertOnTaskQueue();

REJECT_IF_ERROR();

@@ -266,8 +262,7 @@ static RefPtr<MediaByteBuffer> ExtractCodecConfig(
return avcc;
}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::ProcessOutput(
void AndroidDataEncoder::ProcessOutput(
java::Sample::GlobalRef&& aSample,
java::SampleBuffer::GlobalRef&& aBuffer) {
if (!mTaskQueue->IsCurrentThreadIn()) {

@@ -332,29 +327,7 @@ void AndroidDataEncoder<ConfigType>::ProcessOutput(
}
}

template <typename ConfigType>
RefPtr<MediaRawData> AndroidDataEncoder<ConfigType>::GetOutputData(
java::SampleBuffer::Param aBuffer, const int32_t aOffset,
const int32_t aSize, const bool aIsKeyFrame) {
// Copy frame data from Java buffer.
auto output = MakeRefPtr<MediaRawData>();
UniquePtr<MediaRawDataWriter> writer(output->CreateWriter());
if (!writer->SetSize(aSize)) {
AND_ENC_LOGE("fail to allocate output buffer");
return nullptr;
}

jni::ByteBuffer::LocalRef buf = jni::ByteBuffer::New(writer->Data(), aSize);
aBuffer->WriteToByteBuffer(buf, aOffset, aSize);
output->mKeyframe = aIsKeyFrame;

return output;
}

// An AVC/H.264 frame can be in avcC or Annex B format and needs extra
// conversion steps.
template <>
RefPtr<MediaRawData>
AndroidDataEncoder<MediaDataEncoder::H264Config>::GetOutputData(
RefPtr<MediaRawData> AndroidDataEncoder::GetOutputData(
java::SampleBuffer::Param aBuffer, const int32_t aOffset,
const int32_t aSize, const bool aIsKeyFrame) {
auto output = MakeRefPtr<MediaRawData>();

@@ -394,16 +367,12 @@ AndroidDataEncoder<MediaDataEncoder::H264Config>::GetOutputData(
return output;
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise>
AndroidDataEncoder<ConfigType>::Drain() {
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::Drain() {
return InvokeAsync(mTaskQueue, this, __func__,
&AndroidDataEncoder<ConfigType>::ProcessDrain);
&AndroidDataEncoder::ProcessDrain);
}

template <typename ConfigType>
RefPtr<MediaDataEncoder::EncodePromise>
AndroidDataEncoder<ConfigType>::ProcessDrain() {
RefPtr<MediaDataEncoder::EncodePromise> AndroidDataEncoder::ProcessDrain() {
AssertOnTaskQueue();
MOZ_ASSERT(mJavaEncoder);
MOZ_ASSERT(mDrainPromise.IsEmpty());

@@ -432,14 +401,12 @@ AndroidDataEncoder<ConfigType>::ProcessDrain() {
}
}

template <typename ConfigType>
RefPtr<ShutdownPromise> AndroidDataEncoder<ConfigType>::Shutdown() {
RefPtr<ShutdownPromise> AndroidDataEncoder::Shutdown() {
return InvokeAsync(mTaskQueue, this, __func__,
&AndroidDataEncoder<ConfigType>::ProcessShutdown);
&AndroidDataEncoder::ProcessShutdown);
}

template <typename ConfigType>
RefPtr<ShutdownPromise> AndroidDataEncoder<ConfigType>::ProcessShutdown() {
RefPtr<ShutdownPromise> AndroidDataEncoder::ProcessShutdown() {
AssertOnTaskQueue();
if (mJavaEncoder) {
mJavaEncoder->Release();

@@ -457,8 +424,7 @@ RefPtr<ShutdownPromise> AndroidDataEncoder<ConfigType>::ProcessShutdown() {
return ShutdownPromise::CreateAndResolve(true, __func__);
}

template <typename ConfigType>
RefPtr<GenericPromise> AndroidDataEncoder<ConfigType>::SetBitrate(
RefPtr<GenericPromise> AndroidDataEncoder::SetBitrate(
const MediaDataEncoder::Rate aBitsPerSec) {
RefPtr<AndroidDataEncoder> self(this);
return InvokeAsync(mTaskQueue, __func__, [self, aBitsPerSec]() {

@@ -469,12 +435,10 @@ RefPtr<GenericPromise> AndroidDataEncoder<ConfigType>::SetBitrate(
return nullptr;
}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::Error(const MediaResult& aError) {
void AndroidDataEncoder::Error(const MediaResult& aError) {
if (!mTaskQueue->IsCurrentThreadIn()) {
nsresult rv = mTaskQueue->Dispatch(NewRunnableMethod<MediaResult>(
"AndroidDataEncoder::Error", this,
&AndroidDataEncoder<ConfigType>::Error, aError));
"AndroidDataEncoder::Error", this, &AndroidDataEncoder::Error, aError));
MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
Unused << rv;
return;

@@ -484,30 +448,22 @@ void AndroidDataEncoder<ConfigType>::Error(const MediaResult& aError) {
mError = Some(aError);
}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::HandleInput(
int64_t aTimestamp, bool aProcessed) {}
void AndroidDataEncoder::CallbacksSupport::HandleInput(int64_t aTimestamp,
bool aProcessed) {}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::HandleOutput(
void AndroidDataEncoder::CallbacksSupport::HandleOutput(
java::Sample::Param aSample, java::SampleBuffer::Param aBuffer) {
mEncoder->ProcessOutput(std::move(aSample), std::move(aBuffer));
}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::
HandleOutputFormatChanged(java::sdk::MediaFormat::Param aFormat) {}
void AndroidDataEncoder::CallbacksSupport::HandleOutputFormatChanged(
java::sdk::MediaFormat::Param aFormat) {}

template <typename ConfigType>
void AndroidDataEncoder<ConfigType>::CallbacksSupport::HandleError(
void AndroidDataEncoder::CallbacksSupport::HandleError(
const MediaResult& aError) {
mEncoder->Error(aError);
}

// Force compiler to generate code.
template class AndroidDataEncoder<MediaDataEncoder::H264Config>;
template class AndroidDataEncoder<MediaDataEncoder::VP8Config>;
template class AndroidDataEncoder<MediaDataEncoder::VP9Config>;
} // namespace mozilla

#undef AND_ENC_LOG
@@ -16,10 +16,11 @@

namespace mozilla {

template <typename ConfigType>
class AndroidDataEncoder final : public MediaDataEncoder {
public:
AndroidDataEncoder(const ConfigType& aConfig, RefPtr<TaskQueue> aTaskQueue)
using Config = H264Config;

AndroidDataEncoder(const Config& aConfig, RefPtr<TaskQueue> aTaskQueue)
: mConfig(aConfig), mTaskQueue(aTaskQueue) {
MOZ_ASSERT(mConfig.mSize.width > 0 && mConfig.mSize.height > 0);
MOZ_ASSERT(mTaskQueue);

@@ -67,7 +68,7 @@ class AndroidDataEncoder final : public MediaDataEncoder {
MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
}

const ConfigType mConfig;
Config mConfig;

RefPtr<TaskQueue> mTaskQueue;
@@ -6,7 +6,6 @@

#include "AndroidDataEncoder.h"
#include "MP4Decoder.h"
#include "VPXDecoder.h"

#include "mozilla/Logging.h"

@@ -18,34 +17,14 @@ extern LazyLogModule sPEMLog;
("AndroidEncoderModule(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))

bool AndroidEncoderModule::SupportsMimeType(const nsACString& aMimeType) const {
return (MP4Decoder::IsH264(aMimeType) &&
java::HardwareCodecCapabilityUtils::HasHWH264(true /* encoder */)) ||
(VPXDecoder::IsVP8(aMimeType) &&
java::HardwareCodecCapabilityUtils::HasHWVP8(true /* encoder */)) ||
(VPXDecoder::IsVP9(aMimeType) &&
java::HardwareCodecCapabilityUtils::HasHWVP9(true /* encoder */));
return MP4Decoder::IsH264(aMimeType);
}

already_AddRefed<MediaDataEncoder> AndroidEncoderModule::CreateVideoEncoder(
const CreateEncoderParams& aParams) const {
RefPtr<MediaDataEncoder> encoder;
switch (CreateEncoderParams::CodecTypeForMime(aParams.mConfig.mMimeType)) {
case MediaDataEncoder::CodecType::H264:
return MakeRefPtr<AndroidDataEncoder<MediaDataEncoder::H264Config>>(
aParams.ToH264Config(), aParams.mTaskQueue)
.forget();
case MediaDataEncoder::CodecType::VP8:
return MakeRefPtr<AndroidDataEncoder<MediaDataEncoder::VP8Config>>(
aParams.ToVP8Config(), aParams.mTaskQueue)
.forget();
case MediaDataEncoder::CodecType::VP9:
return MakeRefPtr<AndroidDataEncoder<MediaDataEncoder::VP9Config>>(
aParams.ToVP9Config(), aParams.mTaskQueue)
.forget();
default:
AND_PEM_LOG("Unsupported MIME type:%s", aParams.mConfig.mMimeType.get());
return nullptr;
}
RefPtr<MediaDataEncoder> encoder =
new AndroidDataEncoder(aParams.ToH264Config(), aParams.mTaskQueue);
return encoder.forget();
}

} // namespace mozilla
@@ -133,23 +133,6 @@ RefPtr<MediaDataEncoder::InitPromise> AppleVTEncoder::Init() {
__func__);
}

int64_t interval =
mConfig.mKeyframeInterval > std::numeric_limits<int64_t>::max()
? std::numeric_limits<int64_t>::max()
: mConfig.mKeyframeInterval;
AutoCFRelease<CFNumberRef> cf(
CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &interval));
if (VTSessionSetProperty(mSession,
kVTCompressionPropertyKey_MaxKeyFrameInterval,
cf) != noErr) {
return InitPromise::CreateAndReject(
MediaResult(
NS_ERROR_DOM_MEDIA_FATAL_ERR,
nsPrintfCString("fail to configure keyframe interval:%" PRId64,
interval)),
__func__);
}

if (mConfig.mCodecSpecific) {
const H264Specific& specific = mConfig.mCodecSpecific.ref();
if (!SetProfileLevel(mSession, specific.mProfileLevel)) {

@@ -159,6 +142,20 @@ RefPtr<MediaDataEncoder::InitPromise> AppleVTEncoder::Init() {
specific.mProfileLevel)),
__func__);
}

int64_t interval = specific.mKeyframeInterval;
AutoCFRelease<CFNumberRef> cf(
CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &interval));
if (VTSessionSetProperty(mSession,
kVTCompressionPropertyKey_MaxKeyFrameInterval,
cf) != noErr) {
return InitPromise::CreateAndReject(
MediaResult(
NS_ERROR_DOM_MEDIA_FATAL_ERR,
nsPrintfCString("fail to configure keyframe interval:%" PRId64,
interval)),
__func__);
}
}

CFBooleanRef isUsingHW = nullptr;
@@ -74,15 +74,7 @@ static const char* ErrorStr(HRESULT hr) {
}

static const char* CodecStr(const GUID& aGUID) {
if (IsEqualGUID(aGUID, MFVideoFormat_H264)) {
return "H.264";
} else if (IsEqualGUID(aGUID, MFVideoFormat_VP80)) {
return "VP8";
} else if (IsEqualGUID(aGUID, MFVideoFormat_VP90)) {
return "VP9";
} else {
return "Unsupported codec";
}
return IsEqualGUID(aGUID, MFVideoFormat_H264) ? "H.264" : "???";
}

static UINT32 EnumHW(const GUID& aSubtype, IMFActivate**& aActivates) {

@@ -168,8 +160,6 @@ nsTArray<MFTEncoder::Info> MFTEncoder::Enumerate() {
}

PopulateHWEncoderInfo(MFVideoFormat_H264, infos);
PopulateHWEncoderInfo(MFVideoFormat_VP90, infos);
PopulateHWEncoderInfo(MFVideoFormat_VP80, infos);

wmf::MFShutdown();
return infos;
@@ -6,32 +6,36 @@

#include "WMFEncoderModule.h"

#include "MP4Decoder.h"
#include "WMFMediaDataEncoder.h"

namespace mozilla {
extern LazyLogModule sPEMLog;

static MediaDataEncoder::CodecType MimeTypeToCodecType(
const nsACString& aMimeType) {
if (MP4Decoder::IsH264(aMimeType)) {
return MediaDataEncoder::CodecType::H264;
} else {
MOZ_ASSERT(false, "Unsupported Mimetype");
return MediaDataEncoder::CodecType::Unknown;
}
}

bool WMFEncoderModule::SupportsMimeType(const nsACString& aMimeType) const {
return CanCreateWMFEncoder(CreateEncoderParams::CodecTypeForMime(aMimeType));
return CanCreateWMFEncoder(MimeTypeToCodecType(aMimeType));
}

already_AddRefed<MediaDataEncoder> WMFEncoderModule::CreateVideoEncoder(
const CreateEncoderParams& aParams) const {
MediaDataEncoder::CodecType codec =
CreateEncoderParams::CodecTypeForMime(aParams.mConfig.mMimeType);
MimeTypeToCodecType(aParams.mConfig.mMimeType);
RefPtr<MediaDataEncoder> encoder;
switch (codec) {
case MediaDataEncoder::CodecType::H264:
encoder = new WMFMediaDataEncoder<MediaDataEncoder::H264Config>(
aParams.ToH264Config(), aParams.mTaskQueue);
break;
case MediaDataEncoder::CodecType::VP8:
encoder = new WMFMediaDataEncoder<MediaDataEncoder::VP8Config>(
aParams.ToVP8Config(), aParams.mTaskQueue);
break;
case MediaDataEncoder::CodecType::VP9:
encoder = new WMFMediaDataEncoder<MediaDataEncoder::VP9Config>(
aParams.ToVP9Config(), aParams.mTaskQueue);
break;
default:
// Do nothing.
break;
@@ -27,10 +27,6 @@ static const GUID CodecToSubtype(MediaDataEncoder::CodecType aCodec) {
switch (aCodec) {
case MediaDataEncoder::CodecType::H264:
return MFVideoFormat_H264;
case MediaDataEncoder::CodecType::VP8:
return MFVideoFormat_VP80;
case MediaDataEncoder::CodecType::VP9:
return MFVideoFormat_VP90;
default:
MOZ_ASSERT(false, "Unsupported codec");
return GUID_NULL;

@@ -190,14 +186,9 @@ already_AddRefed<IMFMediaType> CreateInputType(Config& aConfig) {
: nullptr;
}

template <typename T>
HRESULT SetCodecSpecific(IMFMediaType* aOutputType, const T& aSpecific) {
return S_OK;
}

template <>
HRESULT SetCodecSpecific(IMFMediaType* aOutputType,
const MediaDataEncoder::H264Specific& aSpecific) {
static HRESULT SetCodecSpecific(
IMFMediaType* aOutputType,
const MediaDataEncoder::H264Specific& aSpecific) {
return aOutputType->SetUINT32(MF_MT_MPEG2_PROFILE,
GetProfile(aSpecific.mProfileLevel));
}

@@ -455,9 +446,11 @@ RefPtr<GenericPromise> WMFMediaDataEncoder<T>::SetBitrate(
});
}

template <typename T>
nsCString WMFMediaDataEncoder<T>::GetDescriptionName() const {
return MFTEncoder::GetFriendlyName(CodecToSubtype(mConfig.mCodecType));
template <>
nsCString
WMFMediaDataEncoder<MediaDataEncoder::H264Config>::GetDescriptionName() const {
MOZ_ASSERT(mConfig.mCodecType == CodecType::H264);
return MFTEncoder::GetFriendlyName(MFVideoFormat_H264);
}

} // namespace mozilla
@@ -15,9 +15,12 @@ namespace mozilla {
/* static */
WebrtcVideoEncoder* MediaDataCodec::CreateEncoder(
webrtc::VideoCodecType aCodecType) {
return WebrtcMediaDataEncoder::CanCreate(aCodecType)
? new WebrtcVideoEncoderProxy(new WebrtcMediaDataEncoder())
: nullptr;
#if defined(MOZ_APPLEMEDIA) || defined(MOZ_WIDGET_ANDROID) || defined(MOZ_WMF)
if (aCodecType == webrtc::VideoCodecType::kVideoCodecH264) {
return new WebrtcVideoEncoderProxy(new WebrtcMediaDataEncoder());
}
#endif
return nullptr;
}

/* static */
@@ -34,6 +34,16 @@ using namespace media;
using namespace layers;
using MimeTypeResult = Maybe<nsLiteralCString>;

static const char* GetModeName(webrtc::H264PacketizationMode aMode) {
if (aMode == webrtc::H264PacketizationMode::SingleNalUnit) {
return "SingleNalUnit";
}
if (aMode == webrtc::H264PacketizationMode::NonInterleaved) {
return "NonInterleaved";
}
return "Unknown";
}

static MimeTypeResult ConvertWebrtcCodecTypeToMimeType(
const webrtc::VideoCodecType& aType) {
switch (aType) {

@@ -44,34 +54,11 @@ static MimeTypeResult ConvertWebrtcCodecTypeToMimeType(
case webrtc::VideoCodecType::kVideoCodecH264:
return Some("video/avc"_ns);
default:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unsupported codec type");
break;
}
return Nothing();
}

bool WebrtcMediaDataEncoder::CanCreate(
const webrtc::VideoCodecType aCodecType) {
auto factory = MakeRefPtr<PEMFactory>();
MimeTypeResult mimeType = ConvertWebrtcCodecTypeToMimeType(aCodecType);
return mimeType ? factory->SupportsMimeType(mimeType.ref()) : false;
}

static const char* PacketModeStr(const webrtc::CodecSpecificInfo& aInfo) {
MOZ_ASSERT(aInfo.codecType != webrtc::VideoCodecType::kVideoCodecUnknown);

if (aInfo.codecType != webrtc::VideoCodecType::kVideoCodecH264) {
return "N/A";
}
switch (aInfo.codecSpecific.H264.packetization_mode) {
case webrtc::H264PacketizationMode::SingleNalUnit:
return "SingleNalUnit";
case webrtc::H264PacketizationMode::NonInterleaved:
return "NonInterleaved";
default:
return "Unknown";
}
}

static MediaDataEncoder::H264Specific::ProfileLevel ConvertProfileLevel(
webrtc::H264::Profile aProfile) {
if (aProfile == webrtc::H264::kProfileConstrainedBaseline ||

@@ -81,20 +68,11 @@ static MediaDataEncoder::H264Specific::ProfileLevel ConvertProfileLevel(
return MediaDataEncoder::H264Specific::ProfileLevel::MainAutoLevel;
}

static MediaDataEncoder::VPXSpecific::Complexity MapComplexity(
webrtc::VideoCodecComplexity aComplexity) {
switch (aComplexity) {
case webrtc::VideoCodecComplexity::kComplexityNormal:
return MediaDataEncoder::VPXSpecific::Complexity::Normal;
case webrtc::VideoCodecComplexity::kComplexityHigh:
return MediaDataEncoder::VPXSpecific::Complexity::High;
case webrtc::VideoCodecComplexity::kComplexityHigher:
return MediaDataEncoder::VPXSpecific::Complexity::Higher;
case webrtc::VideoCodecComplexity::kComplexityMax:
return MediaDataEncoder::VPXSpecific::Complexity::Max;
default:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Bad complexity value");
}
static MediaDataEncoder::H264Specific GetCodecSpecific(
const webrtc::VideoCodec* aCodecSettings) {
return MediaDataEncoder::H264Specific(
aCodecSettings->H264().keyFrameInterval,
ConvertProfileLevel(aCodecSettings->H264().profile));
}

WebrtcMediaDataEncoder::WebrtcMediaDataEncoder()

@@ -111,39 +89,14 @@ WebrtcMediaDataEncoder::WebrtcMediaDataEncoder()
// drastically reduced bitrate, so we want to avoid that. In steady state
// conditions, 0.95 seems to give us better overall bitrate over long
// periods of time.
mBitrateAdjuster(webrtc::Clock::GetRealTimeClock(), 0.5, 0.95) {
PodZero(&mCodecSpecific.codecSpecific);
}

static void InitCodecSpecficInfo(webrtc::CodecSpecificInfo& aInfo,
const webrtc::VideoCodec* aCodecSettings) {
MOZ_ASSERT(aCodecSettings);

aInfo.codecType = aCodecSettings->codecType;
switch (aCodecSettings->codecType) {
case webrtc::VideoCodecType::kVideoCodecH264: {
aInfo.codecSpecific.H264.packetization_mode =
aCodecSettings->H264().packetizationMode == 1
? webrtc::H264PacketizationMode::NonInterleaved
: webrtc::H264PacketizationMode::SingleNalUnit;
break;
}
case webrtc::VideoCodecType::kVideoCodecVP9: {
MOZ_ASSERT(aCodecSettings->VP9().numberOfSpatialLayers == 1);
aInfo.codecSpecific.VP9.flexible_mode =
aCodecSettings->VP9().flexibleMode;
break;
}
default:
break;
}
}
mBitrateAdjuster(webrtc::Clock::GetRealTimeClock(), 0.5, 0.95) {}

int32_t WebrtcMediaDataEncoder::InitEncode(
const webrtc::VideoCodec* aCodecSettings, int32_t aNumberOfCores,
size_t aMaxPayloadSize) {
MOZ_ASSERT(aCodecSettings);
MOZ_ASSERT(aCodecSettings->numberOfSimulcastStreams == 1);
MOZ_ASSERT(
aCodecSettings->codecType == webrtc::VideoCodecType::kVideoCodecH264,
"Only support h264 for now.");

if (mEncoder) {
// Clean existing encoder.

@@ -155,9 +108,8 @@ int32_t WebrtcMediaDataEncoder::InitEncode(
return WEBRTC_VIDEO_CODEC_ERROR;
}

InitCodecSpecficInfo(mCodecSpecific, aCodecSettings);
LOG("Init encode, mimeType %s, mode %s", mInfo.mMimeType.get(),
PacketModeStr(mCodecSpecific));
GetModeName(mMode));
if (!media::Await(do_AddRef(mTaskQueue), encoder->Init()).IsResolve()) {
return WEBRTC_VIDEO_CODEC_ERROR;
}

@@ -175,6 +127,9 @@ bool WebrtcMediaDataEncoder::SetupConfig(
}
mInfo = VideoInfo(aCodecSettings->width, aCodecSettings->height);
mInfo.mMimeType = mimeType.extract();
mMode = aCodecSettings->H264().packetizationMode == 1
? webrtc::H264PacketizationMode::NonInterleaved
: webrtc::H264PacketizationMode::SingleNalUnit;
mMaxFrameRate = aCodecSettings->maxFramerate;
// Those bitrates in the codec settings are all kbps, so we have to convert
// them to bps.
@@ -192,60 +147,14 @@ already_AddRefed<MediaDataEncoder> WebrtcMediaDataEncoder::CreateEncoder(
LOG("Request platform encoder for %s, bitRate=%u bps, frameRate=%u",
mInfo.mMimeType.get(), mBitrateAdjuster.GetTargetBitrateBps(),
aCodecSettings->maxFramerate);

size_t keyframeInterval = 1;
switch (aCodecSettings->codecType) {
case webrtc::VideoCodecType::kVideoCodecH264: {
keyframeInterval = aCodecSettings->H264().keyFrameInterval;
break;
}
case webrtc::VideoCodecType::kVideoCodecVP8: {
keyframeInterval = aCodecSettings->VP8().keyFrameInterval;
break;
}
case webrtc::VideoCodecType::kVideoCodecVP9: {
keyframeInterval = aCodecSettings->VP9().keyFrameInterval;
break;
}
default:
MOZ_ASSERT_UNREACHABLE("Unsupported codec type");
return nullptr;
}
CreateEncoderParams params(
return mFactory->CreateEncoder(CreateEncoderParams(
mInfo, MediaDataEncoder::Usage::Realtime,
MakeRefPtr<TaskQueue>(
GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER),
"WebrtcMediaDataEncoder::mEncoder"),
MediaDataEncoder::PixelFormat::YUV420P, aCodecSettings->maxFramerate,
keyframeInterval, mBitrateAdjuster.GetTargetBitrateBps());
switch (aCodecSettings->codecType) {
case webrtc::VideoCodecType::kVideoCodecH264: {
params.SetCodecSpecific(MediaDataEncoder::H264Specific(
ConvertProfileLevel(aCodecSettings->H264().profile)));
break;
}
case webrtc::VideoCodecType::kVideoCodecVP8: {
const webrtc::VideoCodecVP8& vp8 = aCodecSettings->VP8();
params.SetCodecSpecific(MediaDataEncoder::VPXSpecific::VP8(
MapComplexity(vp8.complexity),
vp8.resilience != webrtc::VP8ResilienceMode::kResilienceOff,
vp8.numberOfTemporalLayers, vp8.denoisingOn, vp8.automaticResizeOn,
vp8.frameDroppingOn));
break;
}
case webrtc::VideoCodecType::kVideoCodecVP9: {
const webrtc::VideoCodecVP9& vp9 = aCodecSettings->VP9();
params.SetCodecSpecific(MediaDataEncoder::VPXSpecific::VP9(
MapComplexity(vp9.complexity), vp9.resilienceOn,
vp9.numberOfTemporalLayers, vp9.denoisingOn, vp9.automaticResizeOn,
vp9.frameDroppingOn, vp9.adaptiveQpMode, vp9.numberOfSpatialLayers,
vp9.flexibleMode));
break;
}
default:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unsupported codec type");
}
return mFactory->CreateEncoder(params);
mBitrateAdjuster.GetTargetBitrateBps(),
GetCodecSpecific(aCodecSettings)));
}

int32_t WebrtcMediaDataEncoder::RegisterEncodeCompleteCallback(

@@ -299,44 +208,6 @@ static already_AddRefed<VideoData> CreateVideoDataFromWebrtcVideoFrame(
TimeUnit::FromMicroseconds(aFrame.timestamp()));
}

static void UpdateCodecSpecificInfo(webrtc::CodecSpecificInfo& aInfo,
const gfx::IntSize& aSize,
const bool aIsKeyframe) {
switch (aInfo.codecType) {
case webrtc::VideoCodecType::kVideoCodecVP8: {
// See webrtc::VP8EncoderImpl::PopulateCodecSpecific().
webrtc::CodecSpecificInfoVP8& vp8 = aInfo.codecSpecific.VP8;
vp8.pictureId = (vp8.pictureId + 1) & 0x7FFF;
vp8.simulcastIdx = 0;
vp8.keyIdx = webrtc::kNoKeyIdx;
// One temporal layer only.
vp8.temporalIdx = 1;
vp8.layerSync = false;
vp8.tl0PicIdx = webrtc::kNoTl0PicIdx;
break;
}
case webrtc::VideoCodecType::kVideoCodecVP9: {
// See webrtc::VP9EncoderImpl::PopulateCodecSpecific().
webrtc::CodecSpecificInfoVP9& vp9 = aInfo.codecSpecific.VP9;
vp9.picture_id = (vp9.picture_id + 1) & 0x7FFF;
vp9.inter_pic_predicted = aIsKeyframe;
vp9.ss_data_available = aIsKeyframe && !vp9.flexible_mode;
// One temporal & spatial layer only.
vp9.temporal_idx = webrtc::kNoTemporalIdx;
vp9.spatial_idx = webrtc::kNoSpatialIdx;
vp9.temporal_up_switch = false;
vp9.tl0_pic_idx = webrtc::kNoTl0PicIdx;
vp9.num_spatial_layers = 1;
vp9.gof_idx = webrtc::kNoGofIdx;
vp9.width[0] = aSize.width;
vp9.height[0] = aSize.height;
break;
}
default:
break;
}
}

int32_t WebrtcMediaDataEncoder::Encode(
const webrtc::VideoFrame& aInputFrame,
const webrtc::CodecSpecificInfo* aCodecSpecificInfo,

@@ -394,13 +265,29 @@ int32_t WebrtcMediaDataEncoder::Encode(
? webrtc::FrameType::kVideoFrameKey
: webrtc::FrameType::kVideoFrameDelta;
image._completeFrame = true;
UpdateCodecSpecificInfo(mCodecSpecific, displaySize,
frame->mKeyframe);
webrtc::RTPFragmentationHeader fragHeader =
GetFragHeader(mCodecSpecific.codecType, frame);

nsTArray<AnnexB::NALEntry> entries;
AnnexB::ParseNALEntries(
Span<const uint8_t>(frame->Data(), frame->Size()), entries);
const size_t nalNums = entries.Length();
LOG_V("NAL nums %zu", nalNums);
MOZ_ASSERT(nalNums, "Should have at least 1 NALU in encoded frame!");

webrtc::RTPFragmentationHeader header;
header.VerifyAndAllocateFragmentationHeader(nalNums);
for (size_t idx = 0; idx < nalNums; idx++) {
header.fragmentationOffset[idx] = entries[idx].mOffset;
header.fragmentationLength[idx] = entries[idx].mSize;
LOG_V("NAL offset %" PRId64 " size %" PRId64, entries[idx].mOffset,
entries[idx].mSize);
}

webrtc::CodecSpecificInfo codecSpecific;
codecSpecific.codecType = webrtc::kVideoCodecH264;
codecSpecific.codecSpecific.H264.packetization_mode = mMode;

LOG_V("Send encoded image");
self->mCallback->OnEncodedImage(image, &mCodecSpecific, &fragHeader);
self->mCallback->OnEncodedImage(image, &codecSpecific, &header);
self->mBitrateAdjuster.Update(image._size);
}
},

@@ -410,42 +297,6 @@ int32_t WebrtcMediaDataEncoder::Encode(
return WEBRTC_VIDEO_CODEC_OK;
}

webrtc::RTPFragmentationHeader WebrtcMediaDataEncoder::GetFragHeader(
const webrtc::VideoCodecType aCodecType,
const RefPtr<MediaRawData>& aFrame) {
webrtc::RTPFragmentationHeader header;
switch (aCodecType) {
case webrtc::VideoCodecType::kVideoCodecH264: {
nsTArray<AnnexB::NALEntry> entries;
AnnexB::ParseNALEntries(
Span<const uint8_t>(aFrame->Data(), aFrame->Size()), entries);
const size_t nalNums = entries.Length();
LOG_V("NAL nums %zu", nalNums);
MOZ_ASSERT(nalNums, "Should have at least 1 NALU in encoded frame!");

header.VerifyAndAllocateFragmentationHeader(nalNums);
for (size_t idx = 0; idx < nalNums; idx++) {
header.fragmentationOffset[idx] = entries[idx].mOffset;
header.fragmentationLength[idx] = entries[idx].mSize;
LOG_V("NAL offset %" PRId64 " size %" PRId64, entries[idx].mOffset,
entries[idx].mSize);
}
break;
}
case webrtc::VideoCodecType::kVideoCodecVP8:
[[fallthrough]];
case webrtc::VideoCodecType::kVideoCodecVP9: {
header.VerifyAndAllocateFragmentationHeader(1);
header.fragmentationLength[0] = aFrame->Size();
break;
}
default:
MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unknown codec type");
}

return header;
}
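[Note: an illustration of the fragmentation header built above — the removed GetFragHeader and the restored inline code construct it the same way for H.264; offsets and sizes here are hypothetical. For an Annex B frame containing an SPS at offset 0 (size 20), a PPS at offset 20 (size 8), and an IDR slice at offset 28, the result would be:]

  // header.fragmentationVectorSize == 3
  // header.fragmentationOffset     == {0, 20, 28}
  // header.fragmentationLength     == {20, 8, sliceSize}
  // For VP8/VP9 the whole frame was a single fragment of aFrame->Size().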
int32_t WebrtcMediaDataEncoder::SetChannelParameters(uint32_t aPacketLoss,
int64_t aRtt) {
return WEBRTC_VIDEO_CODEC_OK;
@@ -22,8 +22,6 @@ class TaskQueue;

class WebrtcMediaDataEncoder : public RefCountedWebrtcVideoEncoder {
public:
static bool CanCreate(const webrtc::VideoCodecType aCodecType);

WebrtcMediaDataEncoder();

uint64_t PluginID() const override { return 0; }

@@ -51,9 +49,6 @@ class WebrtcMediaDataEncoder : public RefCountedWebrtcVideoEncoder {
already_AddRefed<MediaDataEncoder> CreateEncoder(
const webrtc::VideoCodec* aCodecSettings);
bool InitEncoder();
webrtc::RTPFragmentationHeader GetFragHeader(
const webrtc::VideoCodecType aCodecType,
const RefPtr<MediaRawData>& aFrame);

const RefPtr<TaskQueue> mTaskQueue;
const RefPtr<PEMFactory> mFactory;

@@ -64,7 +59,7 @@ class WebrtcMediaDataEncoder : public RefCountedWebrtcVideoEncoder {
MediaResult mError = NS_OK;

VideoInfo mInfo;
webrtc::CodecSpecificInfo mCodecSpecific;
webrtc::H264PacketizationMode mMode;
webrtc::BitrateAdjuster mBitrateAdjuster;
uint32_t mMaxFrameRate;
uint32_t mMinBitrateBps;
@@ -454,17 +454,16 @@ function setupEnvironment() {
["media.navigator.video.max_fr", 10],
["media.autoplay.default", Ci.nsIAutoplay.ALLOWED]
);
} else {
// For platforms other than Android, the tests use the fake H.264 GMP
// encoder. We can't use that with a real decoder until bug 1509012 is
// done, so force the fake H.264 GMP decoder for now.
defaultMochitestPrefs.set.push([
"media.navigator.mediadatadecoder_h264_enabled",
false,
]);
}

// All platforms but Linux support MediaDataEncoder and can use
// MediaDataDecoder for H.264. Linux still uses the fake GMP encoder, so the
// pref stays disabled there.
// [TODO] re-enable after bug 1509012 is done or platform encoder implemented.
defaultMochitestPrefs.set.push([
"media.navigator.mediadatadecoder_h264_enabled",
!navigator.userAgent.includes("Linux"),
]);

// Running as a Mochitest.
SimpleTest.requestFlakyTimeout("WebRTC inherently depends on timeouts");
window.finish = () => SimpleTest.finish();
@@ -35,11 +35,7 @@ runNetworkTest(async function (options) {
// [TODO] re-enable HW decoder after bug 1526207 is fixed.
if (navigator.userAgent.includes("Android")) {
await pushPrefs(["media.navigator.mediadatadecoder_vpx_enabled", false],
["media.webrtc.hw.h264.enabled", false],
// Android platform encoders don't support small (e.g.,
// 80x60) or non-even (e.g., 20x15) resolutions.
// [TODO] re-enable after SW fallback is implemented.
["media.webrtc.platformencoder", false]);
["media.webrtc.hw.h264.enabled", false]);
}

let test = new PeerConnectionTest(options);
@@ -225,7 +225,6 @@ public final class HardwareCodecCapabilityUtils {
return null;
}

@WrapForJNI
public static boolean hasHWVP8(final boolean aIsEncoder) {
return getHWCodecCapability(VP8_MIME_TYPE, aIsEncoder);
}

@@ -235,11 +234,6 @@ public final class HardwareCodecCapabilityUtils {
return getHWCodecCapability(VP9_MIME_TYPE, aIsEncoder);
}

@WrapForJNI
public static boolean hasHWH264(final boolean aIsEncoder) {
return getHWCodecCapability(H264_MIME_TYPE, aIsEncoder);
}

@WrapForJNI(calledFrom = "gecko")
public static boolean hasHWH264() {
return getHWCodecCapability(H264_MIME_TYPE, true) &&
@@ -8902,7 +8902,7 @@

- name: media.webrtc.platformencoder
  type: bool
#if defined(MOZ_WIDGET_ANDROID) || defined(NIGHTLY_BUILD)
#if defined(MOZ_WIDGET_ANDROID)
  value: true
#else
  value: false