Bug 1014393 - Remove EncodedFrameContainer. r=pehrsons

Remove EncodedFrameContainer and clean up areas where it was used.
EncodedFrameContainer provided a wrapper around an
nsTArray<RefPtr<EncodedFrame>>, but the code is simpler if we expose
this array directly. Also remove unused enums in EncodedFrame, and clean up
some of the outdated comments for our encoded frame handling.

MozReview-Commit-ID: Bh3VKesVoJE

Differential Revision: https://phabricator.services.mozilla.com/D35384

--HG--
rename : dom/media/encoder/EncodedFrameContainer.h => dom/media/encoder/EncodedFrame.h
extra : moz-landing-system : lando
This commit is contained in:
Bryce Van Dyk 2019-07-12 13:40:33 +00:00
Parent c949841839
Commit 79c5e43bd4
16 changed files: 156 additions and 218 deletions

Просмотреть файл

@ -7,7 +7,7 @@
#define ContainerWriter_h_
#include "nsTArray.h"
#include "EncodedFrameContainer.h"
#include "EncodedFrame.h"
#include "TrackMetadataBase.h"
namespace mozilla {
@ -26,14 +26,15 @@ class ContainerWriter {
enum { END_OF_STREAM = 1 << 0 };
/**
* Writes encoded track data from aBuffer to a packet, and insert this packet
* into the internal stream of container writer. aDuration is the playback
* duration of this packet in number of samples. aFlags is true with
* END_OF_STREAM if this is the last packet of track.
* Currently, WriteEncodedTrack doesn't support multiple tracks.
* Writes encoded track data from aData into the internal stream of container
* writer. aFlags is used to signal the impl of different conditions
* such as END_OF_STREAM. Each impl may handle different flags, and should be
* documented accordingly. Currently, WriteEncodedTrack doesn't support
* explicit track specification, though each impl may provide logic to
* allocate frames into different tracks.
*/
virtual nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags = 0) = 0;
virtual nsresult WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags = 0) = 0;
/**
* Set the meta data pointer into muxer
@ -59,7 +60,7 @@ class ContainerWriter {
* even it is not full, and copy these container data to a buffer for
* aOutputBufs to append.
*/
virtual nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
virtual nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
uint32_t aFlags = 0) = 0;
protected:

Просмотреть файл

@ -3,39 +3,14 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef EncodedFrameContainer_H_
#define EncodedFrameContainer_H_
#ifndef EncodedFrame_h_
#define EncodedFrame_h_
#include "nsTArray.h"
#include "nsISupportsImpl.h"
namespace mozilla {
class EncodedFrame;
/*
* This container is used to carry video or audio encoded data from encoder to
* muxer. The media data object is created by encoder and recycle by the
* destructor. Only allow to store audio or video encoded data in EncodedData.
*/
class EncodedFrameContainer {
public:
// Append encoded frame data
void AppendEncodedFrame(EncodedFrame* aEncodedFrame) {
mEncodedFrames.AppendElement(aEncodedFrame);
}
// Retrieve all of the encoded frames
const nsTArray<RefPtr<EncodedFrame> >& GetEncodedFrames() const {
return mEncodedFrames;
}
private:
// This container is used to store the video or audio encoded packets.
// Muxer should check mFrameType and get the encoded data type from
// mEncodedFrames.
nsTArray<RefPtr<EncodedFrame> > mEncodedFrames;
};
// Represent one encoded frame
// Represent an encoded frame emitted by an encoder
class EncodedFrame final {
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(EncodedFrame)
public:
@ -44,18 +19,7 @@ class EncodedFrame final {
VP8_I_FRAME, // VP8 intraframe
VP8_P_FRAME, // VP8 predicted frame
OPUS_AUDIO_FRAME, // Opus audio frame
VORBIS_AUDIO_FRAME,
AVC_I_FRAME,
AVC_P_FRAME,
AVC_B_FRAME,
AVC_CSD, // AVC codec specific data
AAC_AUDIO_FRAME,
AAC_CSD, // AAC codec specific data
AMR_AUDIO_CSD,
AMR_AUDIO_FRAME,
EVRC_AUDIO_CSD,
EVRC_AUDIO_FRAME,
UNKNOWN // FrameType not set
UNKNOWN // FrameType not set
};
void SwapInFrameData(nsTArray<uint8_t>& aData) {
mFrameData.SwapElements(aData);
@ -94,4 +58,4 @@ class EncodedFrame final {
} // namespace mozilla
#endif
#endif // EncodedFrame_h_

Просмотреть файл

@ -902,31 +902,21 @@ nsresult MediaEncoder::EncodeData() {
}
if (mVideoEncoder && !mVideoEncoder->IsEncodingComplete()) {
EncodedFrameContainer encodedVideoData;
nsresult rv = mVideoEncoder->GetEncodedTrack(encodedVideoData);
nsresult rv = mVideoEncoder->GetEncodedTrack(mEncodedVideoFrames);
if (NS_FAILED(rv)) {
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from video encoder."));
return rv;
}
for (const RefPtr<EncodedFrame>& frame :
encodedVideoData.GetEncodedFrames()) {
mEncodedVideoFrames.AppendElement(frame);
}
}
if (mAudioEncoder && !mAudioEncoder->IsEncodingComplete()) {
EncodedFrameContainer encodedAudioData;
nsresult rv = mAudioEncoder->GetEncodedTrack(encodedAudioData);
nsresult rv = mAudioEncoder->GetEncodedTrack(mEncodedAudioFrames);
if (NS_FAILED(rv)) {
// Encoding might be canceled.
LOG(LogLevel::Error, ("Failed to get encoded data from audio encoder."));
return rv;
}
for (const RefPtr<EncodedFrame>& frame :
encodedAudioData.GetEncodedFrames()) {
mEncodedAudioFrames.AppendElement(frame);
}
}
return NS_OK;
@ -943,16 +933,11 @@ nsresult MediaEncoder::WriteEncodedDataToMuxer() {
}
if (mVideoEncoder) {
EncodedFrameContainer encodedVideoData;
for (const RefPtr<EncodedFrame>& frame : mEncodedVideoFrames) {
encodedVideoData.AppendEncodedFrame(frame);
}
mEncodedVideoFrames.Clear();
nsresult rv = mWriter->WriteEncodedTrack(
encodedVideoData, mVideoEncoder->IsEncodingComplete()
? ContainerWriter::END_OF_STREAM
: 0);
mEncodedVideoFrames, mVideoEncoder->IsEncodingComplete()
? ContainerWriter::END_OF_STREAM
: 0);
mEncodedVideoFrames.Clear();
if (NS_FAILED(rv)) {
LOG(LogLevel::Error,
("Failed to write encoded video track to the muxer."));
@ -961,16 +946,11 @@ nsresult MediaEncoder::WriteEncodedDataToMuxer() {
}
if (mAudioEncoder) {
EncodedFrameContainer encodedAudioData;
for (const RefPtr<EncodedFrame>& frame : mEncodedAudioFrames) {
encodedAudioData.AppendEncodedFrame(frame);
}
mEncodedAudioFrames.Clear();
nsresult rv = mWriter->WriteEncodedTrack(
encodedAudioData, mAudioEncoder->IsEncodingComplete()
? ContainerWriter::END_OF_STREAM
: 0);
mEncodedAudioFrames, mAudioEncoder->IsEncodingComplete()
? ContainerWriter::END_OF_STREAM
: 0);
mEncodedAudioFrames.Clear();
if (NS_FAILED(rv)) {
LOG(LogLevel::Error,
("Failed to write encoded audio track to the muxer."));

Просмотреть файл

@ -228,7 +228,8 @@ already_AddRefed<TrackMetadataBase> OpusTrackEncoder::GetMetadata() {
return meta.forget();
}
nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
nsresult OpusTrackEncoder::GetEncodedTrack(
nsTArray<RefPtr<EncodedFrame>>& aData) {
AUTO_PROFILER_LABEL("OpusTrackEncoder::GetEncodedTrack", OTHER);
MOZ_ASSERT(mInitialized || mCanceled);
@ -426,7 +427,7 @@ nsresult OpusTrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
mOutputTimeStamp +=
FramesToUsecs(GetPacketDuration(), kOpusSamplingRate).value();
LOG("[Opus] mOutputTimeStamp %lld.", mOutputTimeStamp);
aData.AppendEncodedFrame(audiodata);
aData.AppendElement(audiodata);
}
return result >= 0 ? NS_OK : NS_ERROR_FAILURE;

Просмотреть файл

@ -33,7 +33,7 @@ class OpusTrackEncoder : public AudioTrackEncoder {
already_AddRefed<TrackMetadataBase> GetMetadata() override;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) override;
nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) override;
protected:
int GetPacketDuration() override;

Просмотреть файл

@ -7,7 +7,7 @@
#define TrackEncoder_h_
#include "AudioSegment.h"
#include "EncodedFrameContainer.h"
#include "EncodedFrame.h"
#include "MediaStreamGraph.h"
#include "StreamTracks.h"
#include "TrackMetadataBase.h"
@ -82,7 +82,7 @@ class TrackEncoder {
* Encodes raw segments. Result data is returned in aData, and called on the
* worker thread.
*/
virtual nsresult GetEncodedTrack(EncodedFrameContainer& aData) = 0;
virtual nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) = 0;
/**
* Returns true once this TrackEncoder is initialized.

Просмотреть файл

@ -220,7 +220,8 @@ already_AddRefed<TrackMetadataBase> VP8TrackEncoder::GetMetadata() {
return meta.forget();
}
nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
nsresult VP8TrackEncoder::GetEncodedPartitions(
nsTArray<RefPtr<EncodedFrame>>& aData) {
vpx_codec_iter_t iter = nullptr;
EncodedFrame::FrameType frameType = EncodedFrame::VP8_P_FRAME;
nsTArray<uint8_t> frameData;
@ -286,7 +287,7 @@ nsresult VP8TrackEncoder::GetEncodedPartitions(EncodedFrameContainer& aData) {
", FrameType %d",
videoData->GetTimeStamp(), videoData->GetDuration(),
videoData->GetFrameType());
aData.AppendEncodedFrame(videoData);
aData.AppendElement(videoData);
}
return pkt ? NS_OK : NS_ERROR_NOT_AVAILABLE;
@ -441,7 +442,8 @@ VP8TrackEncoder::EncodeOperation VP8TrackEncoder::GetNextEncodeOperation(
* encode it.
* 4. Remove the encoded chunks in mSourceSegment after for-loop.
*/
nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
nsresult VP8TrackEncoder::GetEncodedTrack(
nsTArray<RefPtr<EncodedFrame>>& aData) {
AUTO_PROFILER_LABEL("VP8TrackEncoder::GetEncodedTrack", OTHER);
MOZ_ASSERT(mInitialized || mCanceled);
@ -509,7 +511,7 @@ nsresult VP8TrackEncoder::GetEncodedTrack(EncodedFrameContainer& aData) {
// because this frame will be skipped.
VP8LOG(LogLevel::Warning,
"MediaRecorder lagging behind. Skipping a frame.");
RefPtr<EncodedFrame> last = aData.GetEncodedFrames().LastElement();
RefPtr<EncodedFrame> last = aData.LastElement();
if (last) {
mExtractedDuration += chunk.mDuration;
if (!mExtractedDuration.isValid()) {

Просмотреть файл

@ -34,7 +34,7 @@ class VP8TrackEncoder : public VideoTrackEncoder {
already_AddRefed<TrackMetadataBase> GetMetadata() final;
nsresult GetEncodedTrack(EncodedFrameContainer& aData) final;
nsresult GetEncodedTrack(nsTArray<RefPtr<EncodedFrame>>& aData) final;
protected:
nsresult Init(int32_t aWidth, int32_t aHeight, int32_t aDisplayWidth,
@ -50,7 +50,7 @@ class VP8TrackEncoder : public VideoTrackEncoder {
// null for EOS detection.
// NS_OK if some data was appended to aData.
// An error nsresult otherwise.
nsresult GetEncodedPartitions(EncodedFrameContainer& aData);
nsresult GetEncodedPartitions(nsTArray<RefPtr<EncodedFrame>>& aData);
// Prepare the input data to the mVPXImageWrapper for encoding.
nsresult PrepareRawFrame(VideoChunk& aChunk);

Просмотреть файл

@ -9,7 +9,7 @@ with Files('*'):
EXPORTS += [
'ContainerWriter.h',
'EncodedFrameContainer.h',
'EncodedFrame.h',
'MediaEncoder.h',
'OpusTrackEncoder.h',
'TrackEncoder.h',

Просмотреть файл

@ -223,12 +223,12 @@ TEST(OpusAudioTrackEncoder, FrameEncode)
encoder.AppendAudioSegment(std::move(segment));
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
// Verify that encoded data is 5 seconds long.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
// 44100 as used above gets resampled to 48000 for opus.

Просмотреть файл

@ -143,8 +143,8 @@ TEST(VP8VideoTrackEncoder, FrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(images.Length()));
// Pull Encoded Data back from encoder.
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
}
// Test that encoding a single frame gives useful output.
@ -165,13 +165,12 @@ TEST(VP8VideoTrackEncoder, SingleFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Read out encoded data, and verify.
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
const size_t oneElement = 1;
ASSERT_EQ(oneElement, frames.Length());
@ -204,14 +203,14 @@ TEST(VP8VideoTrackEncoder, SameFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1.5s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t oneAndAHalf = (PR_USEC_PER_SEC / 2) * 3;
@ -240,14 +239,14 @@ TEST(VP8VideoTrackEncoder, SkippedFrames)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(100));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 100 * 1ms = 100ms.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t hundredMillis = PR_USEC_PER_SEC / 10;
@ -282,14 +281,14 @@ TEST(VP8VideoTrackEncoder, RoundingErrorFramesEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify total duration being 1s.
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t oneSecond = PR_USEC_PER_SEC;
@ -319,8 +318,8 @@ TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
@ -331,7 +330,7 @@ TEST(VP8VideoTrackEncoder, TimestampFrameEncode)
(PR_USEC_PER_SEC / 10)};
uint64_t totalDuration = 0;
size_t i = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
totalDuration += frame->GetDuration();
}
@ -368,8 +367,8 @@ TEST(VP8VideoTrackEncoder, DriftingFrameEncode)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.3));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
@ -380,7 +379,7 @@ TEST(VP8VideoTrackEncoder, DriftingFrameEncode)
(PR_USEC_PER_SEC / 10) * 2};
uint64_t totalDuration = 0;
size_t i = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
EXPECT_EQ(expectedDurations[i++], frame->GetDuration());
totalDuration += frame->GetDuration();
}
@ -433,17 +432,17 @@ TEST(VP8VideoTrackEncoder, Suspended)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have two encoded frames and a total duration of 0.2s.
const uint64_t two = 2;
EXPECT_EQ(two, container.GetEncodedFrames().Length());
EXPECT_EQ(two, frames.Length());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t pointTwo = (PR_USEC_PER_SEC / 10) * 2;
@ -483,17 +482,17 @@ TEST(VP8VideoTrackEncoder, SuspendedUntilEnd)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have one encoded frames and a total duration of 0.1s.
const uint64_t one = 1;
EXPECT_EQ(one, container.GetEncodedFrames().Length());
EXPECT_EQ(one, frames.Length());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t pointOne = PR_USEC_PER_SEC / 10;
@ -522,14 +521,14 @@ TEST(VP8VideoTrackEncoder, AlwaysSuspended)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have no encoded frames.
const uint64_t none = 0;
EXPECT_EQ(none, container.GetEncodedFrames().Length());
EXPECT_EQ(none, frames.Length());
}
// Test that encoding a track that is suspended in the beginning works.
@ -566,17 +565,17 @@ TEST(VP8VideoTrackEncoder, SuspendedBeginning)
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have one encoded frames and a total duration of 0.1s.
const uint64_t one = 1;
EXPECT_EQ(one, container.GetEncodedFrames().Length());
EXPECT_EQ(one, frames.Length());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
@ -619,18 +618,18 @@ TEST(VP8VideoTrackEncoder, SuspendedOverlap)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
// Verify that we have two encoded frames and a total duration of 0.1s.
const uint64_t two = 2;
ASSERT_EQ(two, container.GetEncodedFrames().Length());
ASSERT_EQ(two, frames.Length());
const uint64_t pointFive = (PR_USEC_PER_SEC / 10) * 5;
EXPECT_EQ(pointFive, container.GetEncodedFrames()[0]->GetDuration());
EXPECT_EQ(pointFive, frames[0]->GetDuration());
const uint64_t pointSeven = (PR_USEC_PER_SEC / 10) * 7;
EXPECT_EQ(pointSeven, container.GetEncodedFrames()[1]->GetDuration());
EXPECT_EQ(pointSeven, frames[1]->GetDuration());
}
// Test that ending a track in the middle of already pushed data works.
@ -651,13 +650,13 @@ TEST(VP8VideoTrackEncoder, PrematureEnding)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(0.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
@ -683,13 +682,13 @@ TEST(VP8VideoTrackEncoder, DelayedStart)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
@ -716,13 +715,13 @@ TEST(VP8VideoTrackEncoder, DelayedStartOtherEventOrder)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
@ -748,13 +747,13 @@ TEST(VP8VideoTrackEncoder, VeryDelayedStart)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(10.5));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t half = PR_USEC_PER_SEC / 2;
@ -785,34 +784,34 @@ TEST(VP8VideoTrackEncoder, LongFramesReEncoded)
{
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.5));
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_FALSE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t oneSec = PR_USEC_PER_SEC;
EXPECT_EQ(oneSec, totalDuration);
EXPECT_EQ(1U, container.GetEncodedFrames().Length());
EXPECT_EQ(1U, frames.Length());
}
{
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(11));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
uint64_t totalDuration = 0;
for (auto& frame : container.GetEncodedFrames()) {
for (auto& frame : frames) {
totalDuration += frame->GetDuration();
}
const uint64_t tenSec = PR_USEC_PER_SEC * 10;
EXPECT_EQ(tenSec, totalDuration);
EXPECT_EQ(10U, container.GetEncodedFrames().Length());
EXPECT_EQ(10U, frames.Length());
}
}
@ -853,12 +852,11 @@ TEST(VP8VideoTrackEncoder, ShortKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(1.2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 400ms)
@ -923,12 +921,11 @@ TEST(VP8VideoTrackEncoder, LongKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 600ms)
@ -991,12 +988,11 @@ TEST(VP8VideoTrackEncoder, DefaultKeyFrameInterval)
encoder.AdvanceCurrentTime(now + TimeDuration::FromSeconds(2.2));
encoder.NotifyEndOfStream();
EncodedFrameContainer container;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(6UL, frames.Length());
// [0, 600ms)
@ -1031,7 +1027,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Set keyframe interval to 100ms.
@ -1080,7 +1076,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
// Advancing 501ms, so the first bit of the frame starting at 500ms is
// included.
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(501));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
{
VideoSegment segment;
@ -1106,7 +1102,7 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
// Advancing 2000ms from 501ms to 2501ms
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(2501));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
{
VideoSegment segment;
@ -1130,11 +1126,10 @@ TEST(VP8VideoTrackEncoder, DynamicKeyFrameIntervalChanges)
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(14UL, frames.Length());
// [0, 100ms)
@ -1201,7 +1196,7 @@ TEST(VP8VideoTrackEncoder, DisableOnFrameTime)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Pass a frame in at t=0.
@ -1226,10 +1221,9 @@ TEST(VP8VideoTrackEncoder, DisableOnFrameTime)
encoder.Disable(now + TimeDuration::FromMilliseconds(100));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(2UL, frames.Length());
// [0, 100ms)
@ -1246,7 +1240,7 @@ TEST(VP8VideoTrackEncoder, DisableBetweenFrames)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Pass a frame in at t=0.
@ -1268,10 +1262,9 @@ TEST(VP8VideoTrackEncoder, DisableBetweenFrames)
encoder.Disable(now + TimeDuration::FromMilliseconds(50));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 50ms)
@ -1291,7 +1284,7 @@ TEST(VP8VideoTrackEncoder, EnableOnFrameTime)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Disable the track at t=0.
@ -1318,10 +1311,9 @@ TEST(VP8VideoTrackEncoder, EnableOnFrameTime)
encoder.Enable(now + TimeDuration::FromMilliseconds(100));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(2UL, frames.Length());
// [0, 100ms)
@ -1338,7 +1330,7 @@ TEST(VP8VideoTrackEncoder, EnableBetweenFrames)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
// Disable the track at t=0.
@ -1362,10 +1354,9 @@ TEST(VP8VideoTrackEncoder, EnableBetweenFrames)
encoder.Enable(now + TimeDuration::FromMilliseconds(50));
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(200));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 50ms)
@ -1384,7 +1375,7 @@ TEST(VP8VideoTrackEncoder, BackwardsTimeResets)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
encoder.SetStartOffset(now);
@ -1431,10 +1422,9 @@ TEST(VP8VideoTrackEncoder, BackwardsTimeResets)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(300));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(4UL, frames.Length());
// [0, 100ms)
@ -1457,7 +1447,7 @@ TEST(VP8VideoTrackEncoder, NullImageResets)
TestVP8TrackEncoder encoder;
YUVBufferGenerator generator;
generator.Init(mozilla::gfx::IntSize(640, 480));
EncodedFrameContainer container;
nsTArray<RefPtr<EncodedFrame>> frames;
TimeStamp now = TimeStamp::Now();
encoder.SetStartOffset(now);
@ -1504,10 +1494,9 @@ TEST(VP8VideoTrackEncoder, NullImageResets)
encoder.AdvanceCurrentTime(now + TimeDuration::FromMilliseconds(300));
encoder.NotifyEndOfStream();
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
ASSERT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
const nsTArray<RefPtr<EncodedFrame>>& frames = container.GetEncodedFrames();
ASSERT_EQ(3UL, frames.Length());
// [0, 100ms)
@ -1531,8 +1520,8 @@ TEST(VP8VideoTrackEncoder, EncodeComplete)
// Pull Encoded Data back from encoder. Since we have sent
// EOS to encoder, encoder.GetEncodedTrack should return
// NS_OK immidiately.
EncodedFrameContainer container;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(container)));
nsTArray<RefPtr<EncodedFrame>> frames;
EXPECT_TRUE(NS_SUCCEEDED(encoder.GetEncodedTrack(frames)));
EXPECT_TRUE(encoder.IsEncodingComplete());
}

Просмотреть файл

@ -69,7 +69,7 @@ class TestWebMWriter : public WebMWriter {
// previous cluster so that we can retrieve data by |GetContainerData|.
void AppendDummyFrame(EncodedFrame::FrameType aFrameType,
uint64_t aDuration) {
EncodedFrameContainer encodedVideoData;
nsTArray<RefPtr<EncodedFrame>> encodedVideoData;
nsTArray<uint8_t> frameData;
RefPtr<EncodedFrame> videoData = new EncodedFrame();
// Create dummy frame data.
@ -78,13 +78,13 @@ class TestWebMWriter : public WebMWriter {
videoData->SetTimeStamp(mTimestamp);
videoData->SetDuration(aDuration);
videoData->SwapInFrameData(frameData);
encodedVideoData.AppendEncodedFrame(videoData);
encodedVideoData.AppendElement(videoData);
WriteEncodedTrack(encodedVideoData, 0);
mTimestamp += aDuration;
}
bool HaveValidCluster() {
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
GetContainerData(&encodedBuf, 0);
return (encodedBuf.Length() > 0) ? true : false;
}
@ -100,7 +100,7 @@ TEST(WebMWriter, Metadata)
ContainerWriter::CREATE_VIDEO_TRACK);
// The output should be empty since we didn't set any metadata in writer.
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() == 0);
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
@ -146,7 +146,7 @@ TEST(WebMWriter, Cluster)
int32_t displayHeight = 240;
writer.SetVP8Metadata(width, height, displayWidth, displayHeight, aTrackRate);
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
writer.GetContainerData(&encodedBuf, ContainerWriter::GET_HEADER);
EXPECT_TRUE(encodedBuf.Length() > 0);
encodedBuf.Clear();
@ -199,7 +199,7 @@ TEST(WebMWriter, FLUSH_NEEDED)
// retrieved
EXPECT_FALSE(writer.HaveValidCluster());
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
// Have data because the flag ContainerWriter::FLUSH_NEEDED
writer.GetContainerData(&encodedBuf, ContainerWriter::FLUSH_NEEDED);
EXPECT_TRUE(encodedBuf.Length() > 0);
@ -315,7 +315,7 @@ TEST(WebMWriter, bug970774_aspect_ratio)
writer.AppendDummyFrame(EncodedFrame::VP8_I_FRAME, FIXED_DURATION);
// Get the metadata and the first cluster.
nsTArray<nsTArray<uint8_t> > encodedBuf;
nsTArray<nsTArray<uint8_t>> encodedBuf;
writer.GetContainerData(&encodedBuf, 0);
// Flatten the encodedBuf.
WebMioData ioData;

Просмотреть файл

@ -46,22 +46,20 @@ nsresult OggWriter::Init() {
return (rc == 0) ? NS_OK : NS_ERROR_NOT_INITIALIZED;
}
nsresult OggWriter::WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags) {
nsresult OggWriter::WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags) {
AUTO_PROFILER_LABEL("OggWriter::WriteEncodedTrack", OTHER);
uint32_t len = aData.GetEncodedFrames().Length();
uint32_t len = aData.Length();
for (uint32_t i = 0; i < len; i++) {
if (aData.GetEncodedFrames()[i]->GetFrameType() !=
EncodedFrame::OPUS_AUDIO_FRAME) {
if (aData[i]->GetFrameType() != EncodedFrame::OPUS_AUDIO_FRAME) {
LOG("[OggWriter] wrong encoded data type!");
return NS_ERROR_FAILURE;
}
// only pass END_OF_STREAM on the last frame!
nsresult rv = WriteEncodedData(
aData.GetEncodedFrames()[i]->GetFrameData(),
aData.GetEncodedFrames()[i]->GetDuration(),
aData[i]->GetFrameData(), aData[i]->GetDuration(),
i < len - 1 ? (aFlags & ~ContainerWriter::END_OF_STREAM) : aFlags);
if (NS_FAILED(rv)) {
LOG("%p Failed to WriteEncodedTrack!", this);
@ -111,7 +109,7 @@ nsresult OggWriter::WriteEncodedData(const nsTArray<uint8_t>& aBuffer,
return NS_OK;
}
void OggWriter::ProduceOggPage(nsTArray<nsTArray<uint8_t> >* aOutputBufs) {
void OggWriter::ProduceOggPage(nsTArray<nsTArray<uint8_t>>* aOutputBufs) {
aOutputBufs->AppendElement();
aOutputBufs->LastElement().SetLength(mOggPage.header_len + mOggPage.body_len);
memcpy(aOutputBufs->LastElement().Elements(), mOggPage.header,
@ -120,7 +118,7 @@ void OggWriter::ProduceOggPage(nsTArray<nsTArray<uint8_t> >* aOutputBufs) {
mOggPage.body, mOggPage.body_len);
}
nsresult OggWriter::GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
nsresult OggWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
uint32_t aFlags) {
int rc = -1;
AUTO_PROFILER_LABEL("OggWriter::GetContainerData", OTHER);

Просмотреть файл

@ -23,10 +23,12 @@ class OggWriter : public ContainerWriter {
OggWriter();
~OggWriter();
nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
// Write frames into the ogg container. aFlags should be set to END_OF_STREAM
// for the final set of frames.
nsresult WriteEncodedTrack(const nsTArray<RefPtr<EncodedFrame>>& aData,
uint32_t aFlags = 0) override;
nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
uint32_t aFlags = 0) override;
// Check metadata type integrity and reject unacceptable track encoder.
@ -38,7 +40,7 @@ class OggWriter : public ContainerWriter {
nsresult WriteEncodedData(const nsTArray<uint8_t>& aBuffer, int aDuration,
uint32_t aFlags = 0);
void ProduceOggPage(nsTArray<nsTArray<uint8_t> >* aOutputBufs);
void ProduceOggPage(nsTArray<nsTArray<uint8_t>>* aOutputBufs);
// Store the Medatata from track encoder
RefPtr<OpusMetadata> mMetadata;

Просмотреть файл

@ -19,17 +19,16 @@ WebMWriter::~WebMWriter() {
// Out-of-line dtor so mEbmlComposer nsAutoPtr can delete a complete type.
}
nsresult WebMWriter::WriteEncodedTrack(const EncodedFrameContainer& aData,
uint32_t aFlags) {
nsresult WebMWriter::WriteEncodedTrack(
const nsTArray<RefPtr<EncodedFrame>>& aData, uint32_t aFlags) {
AUTO_PROFILER_LABEL("WebMWriter::WriteEncodedTrack", OTHER);
for (uint32_t i = 0; i < aData.GetEncodedFrames().Length(); i++) {
mEbmlComposer->WriteSimpleBlock(
aData.GetEncodedFrames().ElementAt(i).get());
for (uint32_t i = 0; i < aData.Length(); i++) {
mEbmlComposer->WriteSimpleBlock(aData.ElementAt(i).get());
}
return NS_OK;
}
nsresult WebMWriter::GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
nsresult WebMWriter::GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
uint32_t aFlags) {
AUTO_PROFILER_LABEL("WebMWriter::GetContainerData", OTHER);
mEbmlComposer->ExtractBuffer(aOutputBufs, aFlags);

Просмотреть файл

@ -46,14 +46,16 @@ class WebMWriter : public ContainerWriter {
explicit WebMWriter(uint32_t aTrackTypes);
virtual ~WebMWriter();
// WriteEncodedTrack inserts raw packets into WebM stream.
nsresult WriteEncodedTrack(const EncodedFrameContainer& aData,
// WriteEncodedTrack inserts raw packets into WebM stream. Does not accept
// any flags: any specified will be ignored. Writing is finalized via
// flushing via GetContainerData().
nsresult WriteEncodedTrack(const nsTArray<RefPtr<EncodedFrame>>& aData,
uint32_t aFlags = 0) override;
// GetContainerData outputs multiplexing data.
// aFlags indicates the muxer should enter into finished stage and flush out
// queue data.
nsresult GetContainerData(nsTArray<nsTArray<uint8_t> >* aOutputBufs,
nsresult GetContainerData(nsTArray<nsTArray<uint8_t>>* aOutputBufs,
uint32_t aFlags = 0) override;
// Assign metadata into muxer