Bug 1108701 - Use MediaPromises for RequestAudioData and RequestVideoData. r=cpearce

Bobby Holley 2014-12-10 14:03:56 -08:00
Parent 437abd21ee
Commit 60c0158fdd
15 changed files with 343 additions and 316 deletions
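
In rough terms, the decoded-sample callbacks on RequestSampleCallback give way to MediaPromises: Request{Audio,Video}Data() now returns a promise that the caller chains handlers onto, and each reader resolves or rejects a MediaPromiseHolder when the decode completes. A minimal sketch of the two sides of that contract, distilled from the hunks below rather than taken from any one file:

    // Consumer side (e.g. MediaDecoderStateMachine):
    mReader->RequestAudioData()->Then(DecodeTaskQueue(), __func__, this,
                                      &MediaDecoderStateMachine::OnAudioDecoded,
                                      &MediaDecoderStateMachine::OnAudioNotDecoded);

    // Producer side (a reader implementation):
    nsRefPtr<AudioDataPromise> p = mAudioPromise.Ensure(__func__);
    // ... start the decode asynchronously ...
    return p;
    // Later, when the decode finishes:
    //   mAudioPromise.Resolve(sample, __func__);  or
    //   mAudioPromise.Reject(reason, __func__);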

View file

@ -30,37 +30,6 @@ public:
MOZ_ASSERT(aTaskQueue);
}
virtual void OnAudioDecoded(AudioData* aSample) MOZ_OVERRIDE {
MonitorAutoLock lock(mMonitor);
if (!mTarget || !mTaskQueue) {
// We've been shutdown, abort.
return;
}
RefPtr<nsIRunnable> task(new DeliverAudioTask(aSample, mTarget));
mTaskQueue->Dispatch(task);
}
virtual void OnVideoDecoded(VideoData* aSample) MOZ_OVERRIDE {
MonitorAutoLock lock(mMonitor);
if (!mTarget || !mTaskQueue) {
// We've been shutdown, abort.
return;
}
RefPtr<nsIRunnable> task(new DeliverVideoTask(aSample, mTarget));
mTaskQueue->Dispatch(task);
}
virtual void OnNotDecoded(MediaData::Type aType,
MediaDecoderReader::NotDecodedReason aReason) MOZ_OVERRIDE {
MonitorAutoLock lock(mMonitor);
if (!mTarget || !mTaskQueue) {
// We've been shutdown, abort.
return;
}
RefPtr<nsIRunnable> task(new DeliverNotDecodedTask(aType, aReason, mTarget));
mTaskQueue->Dispatch(task);
}
void BreakCycles() {
MonitorAutoLock lock(mMonitor);
mTarget = nullptr;
@ -82,80 +51,6 @@ public:
}
private:
class DeliverAudioTask : public nsRunnable {
public:
DeliverAudioTask(AudioData* aSample, Target* aTarget)
: mSample(aSample)
, mTarget(aTarget)
{
MOZ_COUNT_CTOR(DeliverAudioTask);
}
protected:
~DeliverAudioTask()
{
MOZ_COUNT_DTOR(DeliverAudioTask);
}
public:
NS_METHOD Run() {
mTarget->OnAudioDecoded(mSample);
return NS_OK;
}
private:
nsRefPtr<AudioData> mSample;
RefPtr<Target> mTarget;
};
class DeliverVideoTask : public nsRunnable {
public:
DeliverVideoTask(VideoData* aSample, Target* aTarget)
: mSample(aSample)
, mTarget(aTarget)
{
MOZ_COUNT_CTOR(DeliverVideoTask);
}
protected:
~DeliverVideoTask()
{
MOZ_COUNT_DTOR(DeliverVideoTask);
}
public:
NS_METHOD Run() {
mTarget->OnVideoDecoded(mSample);
return NS_OK;
}
private:
nsRefPtr<VideoData> mSample;
RefPtr<Target> mTarget;
};
class DeliverNotDecodedTask : public nsRunnable {
public:
DeliverNotDecodedTask(MediaData::Type aType,
MediaDecoderReader::NotDecodedReason aReason,
Target* aTarget)
: mType(aType)
, mReason(aReason)
, mTarget(aTarget)
{
MOZ_COUNT_CTOR(DeliverNotDecodedTask);
}
protected:
~DeliverNotDecodedTask()
{
MOZ_COUNT_DTOR(DeliverNotDecodedTask);
}
public:
NS_METHOD Run() {
mTarget->OnNotDecoded(mType, mReason);
return NS_OK;
}
private:
MediaData::Type mType;
MediaDecoderReader::NotDecodedReason mReason;
RefPtr<Target> mTarget;
};
Monitor mMonitor;
RefPtr<MediaTaskQueue> mTaskQueue;
RefPtr<Target> mTarget;

View file

@ -73,6 +73,7 @@ MediaDecoderReader::MediaDecoderReader(AbstractMediaDecoder* aDecoder)
, mDecoder(aDecoder)
, mIgnoreAudioOutputFormat(false)
, mStartTime(-1)
, mHitAudioDecodeError(false)
, mTaskQueueIsBorrowed(false)
, mAudioDiscontinuity(false)
, mVideoDiscontinuity(false)
@ -188,10 +189,11 @@ private:
int64_t mTimeThreshold;
};
void
nsRefPtr<MediaDecoderReader::VideoDataPromise>
MediaDecoderReader::RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold)
{
nsRefPtr<VideoDataPromise> p = mBaseVideoPromise.Ensure(__func__);
bool skip = aSkipToNextKeyframe;
while (VideoQueue().GetSize() == 0 &&
!VideoQueue().IsFinished()) {
@ -204,7 +206,7 @@ MediaDecoderReader::RequestVideoData(bool aSkipToNextKeyframe,
// would hog the decode task queue.
RefPtr<nsIRunnable> task(new RequestVideoWithSkipTask(this, aTimeThreshold));
mTaskQueue->Dispatch(task);
return;
return p;
}
}
if (VideoQueue().GetSize() > 0) {
@ -213,15 +215,20 @@ MediaDecoderReader::RequestVideoData(bool aSkipToNextKeyframe,
v->mDiscontinuity = true;
mVideoDiscontinuity = false;
}
GetCallback()->OnVideoDecoded(v);
mBaseVideoPromise.Resolve(v, __func__);
} else if (VideoQueue().IsFinished()) {
GetCallback()->OnNotDecoded(MediaData::VIDEO_DATA, END_OF_STREAM);
mBaseVideoPromise.Reject(END_OF_STREAM, __func__);
} else {
MOZ_ASSERT(false, "Dropping this promise on the floor");
}
return p;
}
void
nsRefPtr<MediaDecoderReader::AudioDataPromise>
MediaDecoderReader::RequestAudioData()
{
nsRefPtr<AudioDataPromise> p = mBaseAudioPromise.Ensure(__func__);
while (AudioQueue().GetSize() == 0 &&
!AudioQueue().IsFinished()) {
if (!DecodeAudioData()) {
@ -236,7 +243,7 @@ MediaDecoderReader::RequestAudioData()
RefPtr<nsIRunnable> task(NS_NewRunnableMethod(
this, &MediaDecoderReader::RequestAudioData));
mTaskQueue->Dispatch(task.forget());
return;
return p;
}
}
if (AudioQueue().GetSize() > 0) {
@ -245,12 +252,15 @@ MediaDecoderReader::RequestAudioData()
a->mDiscontinuity = true;
mAudioDiscontinuity = false;
}
GetCallback()->OnAudioDecoded(a);
return;
mBaseAudioPromise.Resolve(a, __func__);
} else if (AudioQueue().IsFinished()) {
GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, END_OF_STREAM);
return;
mBaseAudioPromise.Reject(mHitAudioDecodeError ? DECODE_ERROR : END_OF_STREAM, __func__);
mHitAudioDecodeError = false;
} else {
MOZ_ASSERT(false, "Dropping this promise on the floor");
}
return p;
}
void
@ -288,6 +298,10 @@ MediaDecoderReader::Shutdown()
{
MOZ_ASSERT(OnDecodeThread());
mShutdown = true;
mBaseAudioPromise.RejectIfExists(END_OF_STREAM, __func__);
mBaseVideoPromise.RejectIfExists(END_OF_STREAM, __func__);
ReleaseMediaResources();
nsRefPtr<ShutdownPromise> p;
@ -307,8 +321,9 @@ MediaDecoderReader::Shutdown()
return p;
}
AudioDecodeRendezvous::AudioDecodeRendezvous()
: mMonitor("AudioDecodeRendezvous")
AudioDecodeRendezvous::AudioDecodeRendezvous(MediaDecoderReader *aReader)
: mReader(aReader)
, mMonitor("AudioDecodeRendezvous")
, mHaveResult(false)
{
}
@ -328,10 +343,8 @@ AudioDecodeRendezvous::OnAudioDecoded(AudioData* aSample)
}
void
AudioDecodeRendezvous::OnNotDecoded(MediaData::Type aType,
MediaDecoderReader::NotDecodedReason aReason)
AudioDecodeRendezvous::OnAudioNotDecoded(MediaDecoderReader::NotDecodedReason aReason)
{
MOZ_ASSERT(aType == MediaData::AUDIO_DATA);
MonitorAutoLock mon(mMonitor);
mSample = nullptr;
mStatus = aReason == MediaDecoderReader::DECODE_ERROR ? NS_ERROR_FAILURE : NS_OK;
@ -349,9 +362,18 @@ AudioDecodeRendezvous::Reset()
}
nsresult
AudioDecodeRendezvous::Await(nsRefPtr<AudioData>& aSample)
AudioDecodeRendezvous::RequestAndWait(nsRefPtr<AudioData>& aSample)
{
MonitorAutoLock mon(mMonitor);
// XXXbholley: We hackily use the main thread for calling back the rendezvous,
// since the decode thread is blocked. This is fine for correctness but not
// for jank, and so it goes away in a subsequent patch.
nsCOMPtr<nsIThread> mainThread;
nsresult rv = NS_GetMainThread(getter_AddRefs(mainThread));
NS_ENSURE_SUCCESS(rv, rv);
mReader->RequestAudioData()->Then(mainThread.get(), __func__, this,
&AudioDecodeRendezvous::OnAudioDecoded,
&AudioDecodeRendezvous::OnAudioNotDecoded);
while (!mHaveResult) {
mon.Wait();
}
@ -360,13 +382,4 @@ AudioDecodeRendezvous::Await(nsRefPtr<AudioData>& aSample)
return mStatus;
}
void
AudioDecodeRendezvous::Cancel()
{
MonitorAutoLock mon(mMonitor);
mStatus = NS_ERROR_ABORT;
mHaveResult = true;
mon.NotifyAll();
}
} // namespace mozilla

View file

@ -9,6 +9,7 @@
#include "AbstractMediaDecoder.h"
#include "MediaInfo.h"
#include "MediaData.h"
#include "MediaPromise.h"
#include "MediaQueue.h"
#include "AudioCompactor.h"
@ -37,6 +38,9 @@ public:
CANCELED
};
typedef MediaPromise<nsRefPtr<AudioData>, NotDecodedReason> AudioDataPromise;
typedef MediaPromise<nsRefPtr<VideoData>, NotDecodedReason> VideoDataPromise;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaDecoderReader)
explicit MediaDecoderReader(AbstractMediaDecoder* aDecoder);
@ -93,22 +97,22 @@ public:
// properly!
virtual nsresult ResetDecode();
// Requests the Reader to call OnAudioDecoded() on aCallback with one
// audio sample. The decode should be performed asynchronously, and
// the callback can be performed on any thread. Don't hold the decoder
// Requests one audio sample from the reader.
//
// The decode should be performed asynchronously, and the promise should
// be resolved when it is complete. Don't hold the decoder
// monitor while calling this, as the implementation may try to wait
// on something that needs the monitor and deadlock.
virtual void RequestAudioData();
virtual nsRefPtr<AudioDataPromise> RequestAudioData();
// Requests the Reader to call OnVideoDecoded() on aCallback with one
// video sample. The decode should be performed asynchronously, and
// the callback can be performed on any thread. Don't hold the decoder
// monitor while calling this, as the implementation may try to wait
// on something that needs the monitor and deadlock.
// Requests one video sample from the reader.
//
// Don't hold the decoder monitor while calling this, as the implementation
// may try to wait on something that needs the monitor and deadlock.
// If aSkipToNextKeyframe is true, the decode should skip ahead to the
// next keyframe at or after aTimeThreshold microseconds.
virtual void RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold);
virtual nsRefPtr<VideoDataPromise>
RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold);
virtual bool HasAudio() = 0;
virtual bool HasVideo() = 0;
@ -275,10 +279,21 @@ protected:
// and then set to a value >= 0 by MediaDecoderStateMachine::SetStartTime(),
// after which point it never changes.
int64_t mStartTime;
private:
// This is a quick-and-dirty way for DecodeAudioData implementations to
// communicate the presence of a decoding error to RequestAudioData. We should
// replace this with a promise-y mechanism as we make this stuff properly
// async.
bool mHitAudioDecodeError;
private:
nsRefPtr<RequestSampleCallback> mSampleDecodedCallback;
// Promises used only for the base-class (sync->async adapter) implementation
// of Request{Audio,Video}Data.
MediaPromiseHolder<AudioDataPromise> mBaseAudioPromise;
MediaPromiseHolder<VideoDataPromise> mBaseVideoPromise;
nsRefPtr<MediaTaskQueue> mTaskQueue;
bool mTaskQueueIsBorrowed;
@ -297,17 +312,6 @@ class RequestSampleCallback {
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(RequestSampleCallback)
// Receives the result of a RequestAudioData() call.
virtual void OnAudioDecoded(AudioData* aSample) = 0;
// Receives the result of a RequestVideoData() call.
virtual void OnVideoDecoded(VideoData* aSample) = 0;
// Called when a RequestAudioData() or RequestVideoData() call can't be
// fulfiled. The reason is passed as aReason.
virtual void OnNotDecoded(MediaData::Type aType,
MediaDecoderReader::NotDecodedReason aReason) = 0;
virtual void OnSeekCompleted(nsresult aResult) = 0;
// Called during shutdown to break any reference cycles.
@ -321,29 +325,28 @@ protected:
// MediaDecoderReader to block the thread requesting an audio sample until
// the audio decode is complete. This is used to adapt the asynchronous
// model of the MediaDecoderReader to a synchronous model.
class AudioDecodeRendezvous : public RequestSampleCallback {
class AudioDecodeRendezvous {
public:
AudioDecodeRendezvous();
~AudioDecodeRendezvous();
AudioDecodeRendezvous(MediaDecoderReader *aReader);
// RequestSampleCallback implementation. Called when decode is complete.
// Note: aSample is null at end of stream.
virtual void OnAudioDecoded(AudioData* aSample) MOZ_OVERRIDE;
virtual void OnVideoDecoded(VideoData* aSample) MOZ_OVERRIDE {}
virtual void OnNotDecoded(MediaData::Type aType,
MediaDecoderReader::NotDecodedReason aReason) MOZ_OVERRIDE;
virtual void OnSeekCompleted(nsresult aResult) MOZ_OVERRIDE {};
virtual void BreakCycles() MOZ_OVERRIDE {};
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(AudioDecodeRendezvous)
void OnAudioDecoded(AudioData* aSample);
void OnAudioNotDecoded(MediaDecoderReader::NotDecodedReason aReason);
void Reset();
// Returns failure on error, or NS_OK.
// If *aSample is null, EOS has been reached.
nsresult Await(nsRefPtr<AudioData>& aSample);
nsresult RequestAndWait(nsRefPtr<AudioData>& aSample);
// Interrupts a call to Wait().
void Cancel();
protected:
~AudioDecodeRendezvous();
private:
nsRefPtr<MediaDecoderReader> mReader;
Monitor mMonitor;
nsresult mStatus;
nsRefPtr<AudioData> mSample;
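
For reference, a minimal sketch (assuming the interface above; aReader is a placeholder) of how the rendezvous is now driven; this mirrors the MediaDecodeTask change further down in this patch:

    nsRefPtr<AudioDecodeRendezvous> barrier(new AudioDecodeRendezvous(aReader));
    nsRefPtr<AudioData> sample;
    do {
      // RequestAndWait() issues RequestAudioData() on the reader, bounces the
      // result through the main thread, and blocks until it arrives.
      if (NS_FAILED(barrier->RequestAndWait(sample))) {
        break;  // decode error
      }
      // A null sample with NS_OK means end of stream.
    } while (sample);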

View file

@ -623,7 +623,10 @@ MediaDecoderStateMachine::DecodeVideo()
mVideoDecodeStartTime = TimeStamp::Now();
}
mReader->RequestVideoData(skipToNextKeyFrame, currentTime);
mReader->RequestVideoData(skipToNextKeyFrame, currentTime)
->Then(DecodeTaskQueue(), __func__, this,
&MediaDecoderStateMachine::OnVideoDecoded,
&MediaDecoderStateMachine::OnVideoNotDecoded);
}
bool
@ -666,7 +669,9 @@ MediaDecoderStateMachine::DecodeAudio()
mIsAudioPrerolling = false;
}
}
mReader->RequestAudioData();
mReader->RequestAudioData()->Then(DecodeTaskQueue(), __func__, this,
&MediaDecoderStateMachine::OnAudioDecoded,
&MediaDecoderStateMachine::OnAudioNotDecoded);
}
bool
@ -915,6 +920,7 @@ MediaDecoderStateMachine::MaybeFinishDecodeFirstFrame()
void
MediaDecoderStateMachine::OnVideoDecoded(VideoData* aVideoSample)
{
MOZ_ASSERT(OnDecodeThread());
ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
nsRefPtr<VideoData> video(aVideoSample);
mVideoRequestPending = false;
@ -2128,12 +2134,17 @@ MediaDecoderStateMachine::DecodeFirstFrame()
} else {
if (HasAudio()) {
ReentrantMonitorAutoExit unlock(mDecoder->GetReentrantMonitor());
mReader->RequestAudioData();
mReader->RequestAudioData()->Then(DecodeTaskQueue(), __func__, this,
&MediaDecoderStateMachine::OnAudioDecoded,
&MediaDecoderStateMachine::OnAudioNotDecoded);
}
if (HasVideo()) {
mVideoDecodeStartTime = TimeStamp::Now();
ReentrantMonitorAutoExit unlock(mDecoder->GetReentrantMonitor());
mReader->RequestVideoData(false, 0);
mReader->RequestVideoData(false, 0)
->Then(DecodeTaskQueue(), __func__, this,
&MediaDecoderStateMachine::OnVideoDecoded,
&MediaDecoderStateMachine::OnVideoNotDecoded);
}
}

View file

@ -376,6 +376,17 @@ public:
void OnAudioDecoded(AudioData* aSample);
void OnVideoDecoded(VideoData* aSample);
void OnNotDecoded(MediaData::Type aType, MediaDecoderReader::NotDecodedReason aReason);
void OnAudioNotDecoded(MediaDecoderReader::NotDecodedReason aReason)
{
MOZ_ASSERT(OnDecodeThread());
OnNotDecoded(MediaData::AUDIO_DATA, aReason);
}
void OnVideoNotDecoded(MediaDecoderReader::NotDecodedReason aReason)
{
MOZ_ASSERT(OnDecodeThread());
OnNotDecoded(MediaData::VIDEO_DATA, aReason);
}
void OnSeekCompleted(nsresult aResult);
private:

View file

@ -121,8 +121,8 @@ private:
MP4Reader::MP4Reader(AbstractMediaDecoder* aDecoder)
: MediaDecoderReader(aDecoder)
, mAudio("MP4 audio decoder data", Preferences::GetUint("media.mp4-audio-decode-ahead", 2))
, mVideo("MP4 video decoder data", Preferences::GetUint("media.mp4-video-decode-ahead", 2))
, mAudio(MediaData::AUDIO_DATA, Preferences::GetUint("media.mp4-audio-decode-ahead", 2))
, mVideo(MediaData::VIDEO_DATA, Preferences::GetUint("media.mp4-video-decode-ahead", 2))
, mLastReportedNumDecodedFrames(0)
, mLayersBackendType(layers::LayersBackend::LAYERS_NONE)
, mDemuxerInitialized(false)
@ -154,6 +154,9 @@ MP4Reader::Shutdown()
mAudio.mTaskQueue->AwaitShutdownAndIdle();
mAudio.mTaskQueue = nullptr;
}
mAudio.mPromise.SetMonitor(nullptr);
MOZ_ASSERT(mAudio.mPromise.IsEmpty());
if (mVideo.mDecoder) {
Flush(kVideo);
mVideo.mDecoder->Shutdown();
@ -164,6 +167,8 @@ MP4Reader::Shutdown()
mVideo.mTaskQueue->AwaitShutdownAndIdle();
mVideo.mTaskQueue = nullptr;
}
mVideo.mPromise.SetMonitor(nullptr);
MOZ_ASSERT(mVideo.mPromise.IsEmpty());
// Dispose of the queued sample before shutting down the demuxer
mQueuedVideoSample = nullptr;
@ -492,10 +497,13 @@ MP4Reader::DecoderData&
MP4Reader::GetDecoderData(TrackType aTrack)
{
MOZ_ASSERT(aTrack == kAudio || aTrack == kVideo);
return (aTrack == kAudio) ? mAudio : mVideo;
if (aTrack == kAudio) {
return mAudio;
}
return mVideo;
}
void
nsRefPtr<MediaDecoderReader::VideoDataPromise>
MP4Reader::RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold)
{
@ -509,34 +517,40 @@ MP4Reader::RequestVideoData(bool aSkipToNextKeyframe,
MOZ_ASSERT(HasVideo() && mPlatform && mVideo.mDecoder);
bool eos = false;
if (aSkipToNextKeyframe) {
if (!SkipVideoDemuxToNextKeyFrame(aTimeThreshold, parsed) ||
NS_FAILED(mVideo.mDecoder->Flush())) {
eos = !SkipVideoDemuxToNextKeyFrame(aTimeThreshold, parsed);
if (!eos && NS_FAILED(mVideo.mDecoder->Flush())) {
NS_WARNING("Failed to skip/flush video when skipping-to-next-keyframe.");
}
}
auto& decoder = GetDecoderData(kVideo);
MonitorAutoLock lock(decoder.mMonitor);
decoder.mOutputRequested = true;
ScheduleUpdate(kVideo);
MonitorAutoLock lock(mVideo.mMonitor);
nsRefPtr<VideoDataPromise> p = mVideo.mPromise.Ensure(__func__);
if (eos) {
mVideo.mPromise.Reject(END_OF_STREAM, __func__);
} else {
ScheduleUpdate(kVideo);
}
// Report the number of "decoded" frames as the difference in the
// mNumSamplesOutput field since the last time we were called.
uint64_t delta = mVideo.mNumSamplesOutput - mLastReportedNumDecodedFrames;
decoded = static_cast<uint32_t>(delta);
mLastReportedNumDecodedFrames = mVideo.mNumSamplesOutput;
return p;
}
void
nsRefPtr<MediaDecoderReader::AudioDataPromise>
MP4Reader::RequestAudioData()
{
MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
VLOG("RequestAudioData");
auto& decoder = GetDecoderData(kAudio);
MonitorAutoLock lock(decoder.mMonitor);
decoder.mOutputRequested = true;
MonitorAutoLock lock(mAudio.mMonitor);
nsRefPtr<AudioDataPromise> p = mAudio.mPromise.Ensure(__func__);
ScheduleUpdate(kAudio);
return p;
}
void
@ -566,7 +580,7 @@ MP4Reader::NeedInput(DecoderData& aDecoder)
return
!aDecoder.mError &&
!aDecoder.mDemuxEOS &&
aDecoder.mOutputRequested &&
aDecoder.HasPromise() &&
aDecoder.mOutput.IsEmpty() &&
(aDecoder.mInputExhausted ||
aDecoder.mNumSamplesInput - aDecoder.mNumSamplesOutput < aDecoder.mDecodeAhead);
@ -579,9 +593,7 @@ MP4Reader::Update(TrackType aTrack)
bool needInput = false;
bool needOutput = false;
bool eos = false;
auto& decoder = GetDecoderData(aTrack);
nsRefPtr<MediaData> output;
{
MonitorAutoLock lock(decoder.mMonitor);
decoder.mUpdateScheduled = false;
@ -590,19 +602,23 @@ MP4Reader::Update(TrackType aTrack)
decoder.mInputExhausted = false;
decoder.mNumSamplesInput++;
}
needOutput = decoder.mOutputRequested;
if (needOutput && !decoder.mOutput.IsEmpty()) {
output = decoder.mOutput[0];
decoder.mOutput.RemoveElementAt(0);
if (decoder.HasPromise()) {
needOutput = true;
if (!decoder.mOutput.IsEmpty()) {
nsRefPtr<MediaData> output = decoder.mOutput[0];
decoder.mOutput.RemoveElementAt(0);
ReturnOutput(output, aTrack);
} else if (decoder.mDrainComplete) {
decoder.RejectPromise(END_OF_STREAM, __func__);
}
}
eos = decoder.mDrainComplete;
}
VLOG("Update(%s) ni=%d no=%d iex=%d or=%d fl=%d",
VLOG("Update(%s) ni=%d no=%d iex=%d fl=%d",
TrackTypeToStr(aTrack),
needInput,
needOutput,
decoder.mInputExhausted,
decoder.mOutputRequested,
decoder.mIsFlushing);
if (needInput) {
@ -619,27 +635,17 @@ MP4Reader::Update(TrackType aTrack)
decoder.mDecoder->Drain();
}
}
if (needOutput) {
if (output) {
ReturnOutput(output, aTrack);
} else if (eos) {
ReturnEOS(aTrack);
}
}
}
void
MP4Reader::ReturnOutput(MediaData* aData, TrackType aTrack)
{
auto& decoder = GetDecoderData(aTrack);
{
MonitorAutoLock lock(decoder.mMonitor);
MOZ_ASSERT(decoder.mOutputRequested);
decoder.mOutputRequested = false;
if (decoder.mDiscontinuity) {
decoder.mDiscontinuity = false;
aData->mDiscontinuity = true;
}
decoder.mMonitor.AssertCurrentThreadOwns();
MOZ_ASSERT(decoder.HasPromise());
if (decoder.mDiscontinuity) {
decoder.mDiscontinuity = false;
aData->mDiscontinuity = true;
}
if (aTrack == kAudio) {
@ -653,18 +659,12 @@ MP4Reader::ReturnOutput(MediaData* aData, TrackType aTrack)
mInfo.mAudio.mChannels = audioData->mChannels;
}
GetCallback()->OnAudioDecoded(audioData);
mAudio.mPromise.Resolve(audioData, __func__);
} else if (aTrack == kVideo) {
GetCallback()->OnVideoDecoded(static_cast<VideoData*>(aData));
mVideo.mPromise.Resolve(static_cast<VideoData*>(aData), __func__);
}
}
void
MP4Reader::ReturnEOS(TrackType aTrack)
{
GetCallback()->OnNotDecoded(aTrack == kAudio ? MediaData::AUDIO_DATA : MediaData::VIDEO_DATA, END_OF_STREAM);
}
MP4Sample*
MP4Reader::PopSample(TrackType aTrack)
{
@ -717,7 +717,7 @@ MP4Reader::Output(TrackType aTrack, MediaData* aSample)
decoder.mOutput.AppendElement(aSample);
decoder.mNumSamplesOutput++;
if (NeedInput(decoder) || decoder.mOutputRequested) {
if (NeedInput(decoder) || decoder.HasPromise()) {
ScheduleUpdate(aTrack);
}
}
@ -747,8 +747,10 @@ MP4Reader::Error(TrackType aTrack)
{
MonitorAutoLock mon(data.mMonitor);
data.mError = true;
if (data.HasPromise()) {
data.RejectPromise(DECODE_ERROR, __func__);
}
}
GetCallback()->OnNotDecoded(aTrack == kVideo ? MediaData::VIDEO_DATA : MediaData::AUDIO_DATA, DECODE_ERROR);
}
void
@ -777,10 +779,9 @@ MP4Reader::Flush(TrackType aTrack)
data.mNumSamplesInput = 0;
data.mNumSamplesOutput = 0;
data.mInputExhausted = false;
if (data.mOutputRequested) {
GetCallback()->OnNotDecoded(aTrack == kVideo ? MediaData::VIDEO_DATA : MediaData::AUDIO_DATA, CANCELED);
if (data.HasPromise()) {
data.RejectPromise(CANCELED, __func__);
}
data.mOutputRequested = false;
data.mDiscontinuity = true;
}
if (aTrack == kVideo) {
@ -802,12 +803,9 @@ MP4Reader::SkipVideoDemuxToNextKeyFrame(int64_t aTimeThreshold, uint32_t& parsed
while (true) {
nsAutoPtr<MP4Sample> compressed(PopSample(kVideo));
if (!compressed) {
// EOS, or error. Let the state machine know.
GetCallback()->OnNotDecoded(MediaData::VIDEO_DATA, END_OF_STREAM);
{
MonitorAutoLock mon(mVideo.mMonitor);
mVideo.mDemuxEOS = true;
}
// EOS, or error. This code assumes EOS, which may or may not be right.
MonitorAutoLock mon(mVideo.mMonitor);
mVideo.mDemuxEOS = true;
return false;
}
parsed++;

View file

@ -37,10 +37,10 @@ public:
virtual nsresult Init(MediaDecoderReader* aCloneDonor) MOZ_OVERRIDE;
virtual void RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold) MOZ_OVERRIDE;
virtual nsRefPtr<VideoDataPromise>
RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold) MOZ_OVERRIDE;
virtual void RequestAudioData() MOZ_OVERRIDE;
virtual nsRefPtr<AudioDataPromise> RequestAudioData() MOZ_OVERRIDE;
virtual bool HasAudio() MOZ_OVERRIDE;
virtual bool HasVideo() MOZ_OVERRIDE;
@ -75,7 +75,6 @@ public:
private:
void ReturnEOS(TrackType aTrack);
void ReturnOutput(MediaData* aData, TrackType aTrack);
// Sends input to decoder for aTrack, and output to the state machine,
@ -146,9 +145,11 @@ private:
};
struct DecoderData {
DecoderData(const char* aMonitorName,
DecoderData(MediaData::Type aType,
uint32_t aDecodeAhead)
: mMonitor(aMonitorName)
: mType(aType)
, mMonitor(aType == MediaData::AUDIO_DATA ? "MP4 audio decoder data"
: "MP4 video decoder data")
, mNumSamplesInput(0)
, mNumSamplesOutput(0)
, mDecodeAhead(aDecodeAhead)
@ -156,7 +157,6 @@ private:
, mInputExhausted(false)
, mError(false)
, mIsFlushing(false)
, mOutputRequested(false)
, mUpdateScheduled(false)
, mDemuxEOS(false)
, mDrainComplete(false)
@ -174,6 +174,13 @@ private:
// Decoded samples returned by mDecoder, awaiting being returned to the
// state machine upon request.
nsTArray<nsRefPtr<MediaData> > mOutput;
// Disambiguate Audio vs Video.
MediaData::Type mType;
// These get overridden in the templated concrete class.
virtual bool HasPromise() = 0;
virtual void RejectPromise(MediaDecoderReader::NotDecodedReason aReason,
const char* aMethodName) = 0;
// Monitor that protects all non-threadsafe state; the primitives
// that follow.
@ -186,14 +193,33 @@ private:
bool mInputExhausted;
bool mError;
bool mIsFlushing;
bool mOutputRequested;
bool mUpdateScheduled;
bool mDemuxEOS;
bool mDrainComplete;
bool mDiscontinuity;
};
DecoderData mAudio;
DecoderData mVideo;
template<typename PromiseType>
struct DecoderDataWithPromise : public DecoderData {
DecoderDataWithPromise(MediaData::Type aType, uint32_t aDecodeAhead) :
DecoderData(aType, aDecodeAhead)
{
mPromise.SetMonitor(&mMonitor);
}
MediaPromiseHolder<PromiseType> mPromise;
bool HasPromise() MOZ_OVERRIDE { return !mPromise.IsEmpty(); }
void RejectPromise(MediaDecoderReader::NotDecodedReason aReason,
const char* aMethodName) MOZ_OVERRIDE
{
mPromise.Reject(aReason, aMethodName);
}
};
DecoderDataWithPromise<AudioDataPromise> mAudio;
DecoderDataWithPromise<VideoDataPromise> mVideo;
// Queued samples extracted by the demuxer, but not yet sent to the platform
// decoder.
nsAutoPtr<mp4_demuxer::MP4Sample> mQueuedVideoSample;

View file

@ -94,18 +94,22 @@ MediaSourceReader::IsWaitingMediaResources()
return !mHasEssentialTrackBuffers;
}
void
nsRefPtr<MediaDecoderReader::AudioDataPromise>
MediaSourceReader::RequestAudioData()
{
nsRefPtr<AudioDataPromise> p = mAudioPromise.Ensure(__func__);
MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData", this);
if (!mAudioReader) {
MSE_DEBUG("MediaSourceReader(%p)::RequestAudioData called with no audio reader", this);
GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, DECODE_ERROR);
return;
mAudioPromise.Reject(DECODE_ERROR, __func__);
return p;
}
mAudioIsSeeking = false;
SwitchAudioReader(mLastAudioTime);
mAudioReader->RequestAudioData();
mAudioReader->RequestAudioData()->Then(GetTaskQueue(), __func__, this,
&MediaSourceReader::OnAudioDecoded,
&MediaSourceReader::OnAudioNotDecoded);
return p;
}
void
@ -117,7 +121,9 @@ MediaSourceReader::OnAudioDecoded(AudioData* aSample)
if (aSample->mTime < mTimeThreshold) {
MSE_DEBUG("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld < mTimeThreshold=%lld",
this, aSample->mTime, mTimeThreshold);
mAudioReader->RequestAudioData();
mAudioReader->RequestAudioData()->Then(GetTaskQueue(), __func__, this,
&MediaSourceReader::OnAudioDecoded,
&MediaSourceReader::OnAudioNotDecoded);
return;
}
mDropAudioBeforeThreshold = false;
@ -129,18 +135,84 @@ MediaSourceReader::OnAudioDecoded(AudioData* aSample)
if (!mAudioIsSeeking) {
mLastAudioTime = aSample->mTime + aSample->mDuration;
}
GetCallback()->OnAudioDecoded(aSample);
mAudioPromise.Resolve(aSample, __func__);
}
// Find the closest approximation to the end time for this stream.
// mLast{Audio,Video}Time differs from the actual end time because of
// Bug 1065207 - the duration of a WebM fragment is an estimate not the
// actual duration. In the case of audio time an example of where they
// differ would be the actual sample duration being small but the
// previous sample being large. The buffered end time uses that last
// sample duration as an estimate of the end time duration giving an end
// time that is greater than mLastAudioTime, which is the actual sample
// end time.
// Reader switching is based on the buffered end time though so they can be
// quite different. By using the EOS_FUZZ_US and the buffered end time we
// attempt to account for this difference.
static void
AdjustEndTime(int64_t* aEndTime, MediaDecoderReader* aReader)
{
if (aReader) {
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
aReader->GetBuffered(ranges);
if (ranges->Length() > 0) {
// End time is a double so we convert to nearest by adding 0.5.
int64_t end = ranges->GetEndTime() * USECS_PER_S + 0.5;
*aEndTime = std::max(*aEndTime, end);
}
}
}
void
MediaSourceReader::OnAudioNotDecoded(NotDecodedReason aReason)
{
MSE_DEBUG("MediaSourceReader(%p)::OnAudioNotDecoded aReason=%u IsEnded: %d", this, aReason, IsEnded());
if (aReason == DECODE_ERROR || aReason == CANCELED) {
mAudioPromise.Reject(aReason, __func__);
return;
}
// End of stream. Force switching past this stream to another reader by
// switching to the end of the buffered range.
MOZ_ASSERT(aReason == END_OF_STREAM);
if (mAudioReader) {
AdjustEndTime(&mLastAudioTime, mAudioReader);
}
// See if we can find a different reader that can pick up where we left off. We use the
// EOS_FUZZ_US to allow for the fact that our end time can be inaccurate due to bug
// 1065207.
if (SwitchAudioReader(mLastAudioTime + EOS_FUZZ_US)) {
mAudioReader->RequestAudioData()->Then(GetTaskQueue(), __func__, this,
&MediaSourceReader::OnAudioDecoded,
&MediaSourceReader::OnAudioNotDecoded);
return;
}
// If the entire MediaSource is done, generate an EndOfStream.
if (IsEnded()) {
mAudioPromise.Reject(END_OF_STREAM, __func__);
return;
}
// We don't have the data the caller wants. Tell the caller that we're
// waiting for JS to give us more data.
mAudioPromise.Reject(WAITING_FOR_DATA, __func__);
}
nsRefPtr<MediaDecoderReader::VideoDataPromise>
MediaSourceReader::RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold)
{
nsRefPtr<VideoDataPromise> p = mVideoPromise.Ensure(__func__);
MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData(%d, %lld)",
this, aSkipToNextKeyframe, aTimeThreshold);
if (!mVideoReader) {
MSE_DEBUG("MediaSourceReader(%p)::RequestVideoData called with no video reader", this);
GetCallback()->OnNotDecoded(MediaData::VIDEO_DATA, DECODE_ERROR);
return;
mVideoPromise.Reject(DECODE_ERROR, __func__);
return p;
}
if (aSkipToNextKeyframe) {
mTimeThreshold = aTimeThreshold;
@ -149,7 +221,11 @@ MediaSourceReader::RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThres
}
mVideoIsSeeking = false;
SwitchVideoReader(mLastVideoTime);
mVideoReader->RequestVideoData(aSkipToNextKeyframe, aTimeThreshold);
mVideoReader->RequestVideoData(aSkipToNextKeyframe, aTimeThreshold)
->Then(GetTaskQueue(), __func__, this,
&MediaSourceReader::OnVideoDecoded, &MediaSourceReader::OnVideoNotDecoded);
return p;
}
void
@ -161,7 +237,9 @@ MediaSourceReader::OnVideoDecoded(VideoData* aSample)
if (aSample->mTime < mTimeThreshold) {
MSE_DEBUG("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld < mTimeThreshold=%lld",
this, aSample->mTime, mTimeThreshold);
mVideoReader->RequestVideoData(false, 0);
mVideoReader->RequestVideoData(false, 0)->Then(GetTaskQueue(), __func__, this,
&MediaSourceReader::OnVideoDecoded,
&MediaSourceReader::OnVideoNotDecoded);
return;
}
mDropVideoBeforeThreshold = false;
@ -173,67 +251,46 @@ MediaSourceReader::OnVideoDecoded(VideoData* aSample)
if (!mVideoIsSeeking) {
mLastVideoTime = aSample->mTime + aSample->mDuration;
}
GetCallback()->OnVideoDecoded(aSample);
mVideoPromise.Resolve(aSample, __func__);
}
void
MediaSourceReader::OnNotDecoded(MediaData::Type aType, NotDecodedReason aReason)
MediaSourceReader::OnVideoNotDecoded(NotDecodedReason aReason)
{
MSE_DEBUG("MediaSourceReader(%p)::OnNotDecoded aType=%u aReason=%u IsEnded: %d", this, aType, aReason, IsEnded());
MSE_DEBUG("MediaSourceReader(%p)::OnVideoNotDecoded aReason=%u IsEnded: %d", this, aReason, IsEnded());
if (aReason == DECODE_ERROR || aReason == CANCELED) {
GetCallback()->OnNotDecoded(aType, aReason);
mVideoPromise.Reject(aReason, __func__);
return;
}
// End of stream. Force switching past this stream to another reader by
// switching to the end of the buffered range.
MOZ_ASSERT(aReason == END_OF_STREAM);
nsRefPtr<MediaDecoderReader> reader = aType == MediaData::AUDIO_DATA ?
mAudioReader : mVideoReader;
// Find the closest approximation to the end time for this stream.
// mLast{Audio,Video}Time differs from the actual end time because of
// Bug 1065207 - the duration of a WebM fragment is an estimate not the
// actual duration. In the case of audio time an example of where they
// differ would be the actual sample duration being small but the
// previous sample being large. The buffered end time uses that last
// sample duration as an estimate of the end time duration giving an end
// time that is greater than mLastAudioTime, which is the actual sample
// end time.
// Reader switching is based on the buffered end time though so they can be
// quite different. By using the EOS_FUZZ_US and the buffered end time we
// attempt to account for this difference.
int64_t* time = aType == MediaData::AUDIO_DATA ? &mLastAudioTime : &mLastVideoTime;
if (reader) {
nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
reader->GetBuffered(ranges);
if (ranges->Length() > 0) {
// End time is a double so we convert to nearest by adding 0.5.
int64_t end = ranges->GetEndTime() * USECS_PER_S + 0.5;
*time = std::max(*time, end);
}
if (mVideoReader) {
AdjustEndTime(&mLastVideoTime, mVideoReader);
}
// See if we can find a different reader that can pick up where we left off. We use the
// EOS_FUZZ_US to allow for the fact that our end time can be inaccurate due to bug
// 1065207 - the duration of a WebM frame is an estimate.
if (aType == MediaData::AUDIO_DATA && SwitchAudioReader(*time + EOS_FUZZ_US)) {
RequestAudioData();
return;
}
if (aType == MediaData::VIDEO_DATA && SwitchVideoReader(*time + EOS_FUZZ_US)) {
RequestVideoData(false, 0);
// 1065207.
if (SwitchVideoReader(mLastVideoTime + EOS_FUZZ_US)) {
mVideoReader->RequestVideoData(false, 0)
->Then(GetTaskQueue(), __func__, this,
&MediaSourceReader::OnVideoDecoded,
&MediaSourceReader::OnVideoNotDecoded);
return;
}
// If the entire MediaSource is done, generate an EndOfStream.
if (IsEnded()) {
GetCallback()->OnNotDecoded(aType, END_OF_STREAM);
mVideoPromise.Reject(END_OF_STREAM, __func__);
return;
}
// We don't have the data the caller wants. Tell the caller that we're
// waiting for JS to give us more data.
GetCallback()->OnNotDecoded(aType, WAITING_FOR_DATA);
mVideoPromise.Reject(WAITING_FOR_DATA, __func__);
}
nsRefPtr<ShutdownPromise>
@ -264,6 +321,9 @@ MediaSourceReader::ContinueShutdown(bool aSuccess)
mVideoTrack = nullptr;
mVideoReader = nullptr;
MOZ_ASSERT(mAudioPromise.IsEmpty());
MOZ_ASSERT(mVideoPromise.IsEmpty());
MediaDecoderReader::Shutdown()->ChainTo(mMediaSourceShutdownPromise.Steal(), __func__);
}

View file

@ -46,15 +46,14 @@ public:
bool IsWaitingMediaResources() MOZ_OVERRIDE;
void RequestAudioData() MOZ_OVERRIDE;
nsRefPtr<AudioDataPromise> RequestAudioData() MOZ_OVERRIDE;
nsRefPtr<VideoDataPromise>
RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold) MOZ_OVERRIDE;
void OnAudioDecoded(AudioData* aSample);
void RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold) MOZ_OVERRIDE;
void OnAudioNotDecoded(NotDecodedReason aReason);
void OnVideoDecoded(VideoData* aSample);
void OnNotDecoded(MediaData::Type aType, NotDecodedReason aReason);
void OnVideoNotDecoded(NotDecodedReason aReason);
void OnSeekCompleted(nsresult aResult);
@ -140,6 +139,9 @@ private:
nsRefPtr<TrackBuffer> mAudioTrack;
nsRefPtr<TrackBuffer> mVideoTrack;
MediaPromiseHolder<AudioDataPromise> mAudioPromise;
MediaPromiseHolder<VideoDataPromise> mVideoPromise;
#ifdef MOZ_EME
nsRefPtr<CDMProxy> mCDMProxy;
#endif

View file

@ -347,6 +347,8 @@ MediaCodecReader::ReleaseMediaResources()
nsRefPtr<ShutdownPromise>
MediaCodecReader::Shutdown()
{
MOZ_ASSERT(mAudioPromise.IsEmpty());
MOZ_ASSERT(mVideoPromise.IsEmpty());
ReleaseResources();
return MediaDecoderReader::Shutdown();
}
@ -374,23 +376,28 @@ MediaCodecReader::DispatchVideoTask(int64_t aTimeThreshold)
}
}
void
nsRefPtr<MediaDecoderReader::AudioDataPromise>
MediaCodecReader::RequestAudioData()
{
MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
MOZ_ASSERT(HasAudio());
nsRefPtr<AudioDataPromise> p = mAudioPromise.Ensure(__func__);
if (CheckAudioResources()) {
DispatchAudioTask();
}
return p;
}
void
nsRefPtr<MediaDecoderReader::VideoDataPromise>
MediaCodecReader::RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold)
{
MOZ_ASSERT(GetTaskQueue()->IsCurrentThreadIn());
MOZ_ASSERT(HasVideo());
nsRefPtr<VideoDataPromise> p = mVideoPromise.Ensure(__func__);
int64_t threshold = sInvalidTimestampUs;
if (aSkipToNextKeyframe && IsValidTimestampUs(aTimeThreshold)) {
mVideoTrack.mTaskQueue->Flush();
@ -399,6 +406,8 @@ MediaCodecReader::RequestVideoData(bool aSkipToNextKeyframe,
if (CheckVideoResources()) {
DispatchVideoTask(threshold);
}
return p;
}
bool
@ -484,11 +493,11 @@ MediaCodecReader::DecodeAudioDataTask()
a->mDiscontinuity = true;
mAudioTrack.mDiscontinuity = false;
}
GetCallback()->OnAudioDecoded(a);
mAudioPromise.Resolve(a, __func__);
}
}
if (AudioQueue().AtEndOfStream()) {
GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, END_OF_STREAM);
else if (AudioQueue().AtEndOfStream()) {
mAudioPromise.Reject(END_OF_STREAM, __func__);
}
return result;
}
@ -504,11 +513,11 @@ MediaCodecReader::DecodeVideoFrameTask(int64_t aTimeThreshold)
v->mDiscontinuity = true;
mVideoTrack.mDiscontinuity = false;
}
GetCallback()->OnVideoDecoded(v);
mVideoPromise.Resolve(v, __func__);
}
}
if (VideoQueue().AtEndOfStream()) {
GetCallback()->OnNotDecoded(MediaData::VIDEO_DATA, END_OF_STREAM);
else if (VideoQueue().AtEndOfStream()) {
mVideoPromise.Reject(END_OF_STREAM, __func__);
}
return result;
}

View file

@ -79,11 +79,12 @@ public:
virtual nsresult ResetDecode() MOZ_OVERRIDE;
// Dispatch a DecodeVideoFrameTask to decode video data.
virtual void RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold) MOZ_OVERRIDE;
virtual nsRefPtr<VideoDataPromise>
RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold) MOZ_OVERRIDE;
// Dispatch a DecodeAudioDataTask to decode audio data.
virtual void RequestAudioData() MOZ_OVERRIDE;
virtual nsRefPtr<AudioDataPromise> RequestAudioData() MOZ_OVERRIDE;
virtual bool HasAudio();
virtual bool HasVideo();

View file

@ -74,19 +74,19 @@ RtspMediaCodecReader::EnsureActive()
mRtspResource->SetSuspend(false);
}
void
nsRefPtr<MediaDecoderReader::AudioDataPromise>
RtspMediaCodecReader::RequestAudioData()
{
EnsureActive();
MediaCodecReader::RequestAudioData();
return MediaCodecReader::RequestAudioData();
}
void
nsRefPtr<MediaDecoderReader::VideoDataPromise>
RtspMediaCodecReader::RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold)
{
EnsureActive();
MediaCodecReader::RequestVideoData(aSkipToNextKeyframe, aTimeThreshold);
return MediaCodecReader::RequestVideoData(aSkipToNextKeyframe, aTimeThreshold);
}
nsresult

View file

@ -55,11 +55,12 @@ public:
virtual void SetIdle() MOZ_OVERRIDE;
// Dispatch a DecodeVideoFrameTask to decode video data.
virtual void RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold) MOZ_OVERRIDE;
virtual nsRefPtr<VideoDataPromise>
RequestVideoData(bool aSkipToNextKeyframe,
int64_t aTimeThreshold) MOZ_OVERRIDE;
// Dispatch a DecodeAudioDataTask to decode audio data.
virtual void RequestAudioData() MOZ_OVERRIDE;
virtual nsRefPtr<AudioDataPromise> RequestAudioData() MOZ_OVERRIDE;
virtual nsresult ReadMetadata(MediaInfo* aInfo,
MetadataTags** aTags) MOZ_OVERRIDE;

View file

@ -261,12 +261,10 @@ MediaDecodeTask::Decode()
}
MediaQueue<AudioData> audioQueue;
nsRefPtr<AudioDecodeRendezvous> barrier(new AudioDecodeRendezvous());
mDecoderReader->SetCallback(barrier);
nsRefPtr<AudioDecodeRendezvous> barrier(new AudioDecodeRendezvous(mDecoderReader));
while (1) {
mDecoderReader->RequestAudioData();
nsRefPtr<AudioData> audio;
if (NS_FAILED(barrier->Await(audio))) {
if (NS_FAILED(barrier->RequestAndWait(audio))) {
mDecoderReader->Shutdown();
ReportFailureOnMainThread(WebAudioDecodeJob::InvalidContent);
return;

View file

@ -744,7 +744,7 @@ bool WebMReader::DecodeOpus(const unsigned char* aData, size_t aLength,
// Discard padding should be used only on the final packet, so
// decoding after a padding discard is invalid.
LOG(PR_LOG_DEBUG, ("Opus error, discard padding on interstitial packet"));
GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, DECODE_ERROR);
mHitAudioDecodeError = true;
return false;
}
@ -798,8 +798,7 @@ bool WebMReader::DecodeOpus(const unsigned char* aData, size_t aLength,
if (discardPadding < 0) {
// Negative discard padding is invalid.
LOG(PR_LOG_DEBUG, ("Opus error, negative discard padding"));
GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, DECODE_ERROR);
return false;
mHitAudioDecodeError = true;
}
if (discardPadding > 0) {
CheckedInt64 discardFrames = UsecsToFrames(discardPadding / NS_PER_USEC,
@ -811,7 +810,7 @@ bool WebMReader::DecodeOpus(const unsigned char* aData, size_t aLength,
if (discardFrames.value() > frames) {
// Discarding more than the entire packet is invalid.
LOG(PR_LOG_DEBUG, ("Opus error, discard padding larger than packet"));
GetCallback()->OnNotDecoded(MediaData::AUDIO_DATA, DECODE_ERROR);
mHitAudioDecodeError = true;
return false;
}
LOG(PR_LOG_DEBUG, ("Opus decoder discarding %d of %d frames",