Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1101374: create frame time table to filter the invalid pts frame. r=edwin
Parent: 58cc323c20
Commit: 641c2b7fc8
@@ -204,6 +204,12 @@ GonkAudioDecoderManager::Output(int64_t aStreamOffset,
   return NS_OK;
 }
 
+nsresult
+GonkAudioDecoderManager::Flush()
+{
+  return NS_OK;
+}
+
 void GonkAudioDecoderManager::ReleaseAudioBuffer() {
   if (mAudioBuffer) {
     mDecoder->ReleaseMediaBuffer(mAudioBuffer);
@@ -32,6 +32,8 @@ public:
   virtual nsresult Output(int64_t aStreamOffset,
                           nsRefPtr<MediaData>& aOutput) MOZ_OVERRIDE;
 
+  virtual nsresult Flush() MOZ_OVERRIDE;
+
 private:
 
   nsresult CreateAudioData(int64_t aStreamOffset,
@@ -135,8 +135,7 @@ GonkMediaDataDecoder::Flush()
   // flushing.
   mTaskQueue->Flush();
 
-  status_t err = mDecoder->flush();
-  return err == OK ? NS_OK : NS_ERROR_FAILURE;
+  return mManager->Flush();
 }
 
 void
@@ -33,6 +33,7 @@ public:
   virtual nsresult Input(mp4_demuxer::MP4Sample* aSample) = 0;
   virtual nsresult Output(int64_t aStreamOffset,
                           nsRefPtr<MediaData>& aOutput) = 0;
+  virtual nsresult Flush() = 0;
 
   virtual void ReleaseMediaResources() {};
 };
 
@@ -54,6 +54,7 @@ GonkVideoDecoderManager::GonkVideoDecoderManager(
                         const mp4_demuxer::VideoDecoderConfig& aConfig)
   : mImageContainer(aImageContainer)
   , mReaderCallback(nullptr)
+  , mMonitor("GonkVideoDecoderManager")
   , mColorConverterBufferSize(0)
   , mNativeWindow(nullptr)
   , mPendingVideoBuffersLock("GonkVideoDecoderManager::mPendingVideoBuffersLock")
@@ -121,6 +122,50 @@ GonkVideoDecoderManager::Init(MediaDataDecoderCallback* aCallback)
   return mDecoder;
 }
 
+void
+GonkVideoDecoderManager::QueueFrameTimeIn(int64_t aPTS, int64_t aDuration)
+{
+  MonitorAutoLock mon(mMonitor);
+  FrameTimeInfo timeInfo = {aPTS, aDuration};
+  mFrameTimeInfo.AppendElement(timeInfo);
+}
+
+nsresult
+GonkVideoDecoderManager::QueueFrameTimeOut(int64_t aPTS, int64_t& aDuration)
+{
+  MonitorAutoLock mon(mMonitor);
+
+  // Set default to 1 here.
+  // During seeking, frames could still in MediaCodec and the mFrameTimeInfo could
+  // be cleared before these frames are out from MediaCodec. This is ok because
+  // these frames are old frame before seeking.
+  aDuration = 1;
+  for (uint32_t i = 0; i < mFrameTimeInfo.Length(); i++) {
+    const FrameTimeInfo& entry = mFrameTimeInfo.ElementAt(i);
+    if (i == 0) {
+      if (entry.pts > aPTS) {
+        // Codec sent a frame with rollbacked PTS time. It could
+        // be codec's problem.
+        ReleaseVideoBuffer();
+        return NS_ERROR_NOT_AVAILABLE;
+      }
+    }
+
+    // Ideally, the first entry in mFrameTimeInfo should be the one we are looking
+    // for. However, MediaCodec could dropped frame and the first entry doesn't
+    // match current decoded frame's PTS.
+    if (entry.pts == aPTS) {
+      aDuration = entry.duration;
+      if (i > 0) {
+        LOG("Frame could be dropped by MediaCodec, %d dropped frames.", i);
+      }
+      mFrameTimeInfo.RemoveElementsAt(0, i+1);
+      break;
+    }
+  }
+  return NS_OK;
+}
+
 nsresult
 GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
 {
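The two helpers above are the core of the patch: the input side records each sample's (pts, duration) pair, and the output side looks the duration back up by the pts the codec reports, discarding entries for frames the codec dropped and rejecting a frame whose pts rolls back before the oldest queued entry. Below is a minimal standalone sketch of that bookkeeping, for illustration only; it uses std::deque and std::mutex in place of nsTArray and Gecko's Monitor, and every name in it (FrameTimeTable, QueueIn, QueueOut, Clear) is invented here rather than taken from the Gecko code.

// Illustrative sketch only -- not Gecko code. FrameTimeTable mimics the
// QueueFrameTimeIn()/QueueFrameTimeOut() pairing added in this commit.
#include <cstdint>
#include <cstdio>
#include <deque>
#include <mutex>

struct FrameTime {
  int64_t pts;       // presentation time stamp sent to the codec
  int64_t duration;  // playback duration of that frame
};

class FrameTimeTable {
public:
  // Input side: remember the (pts, duration) of every sample fed to the codec.
  void QueueIn(int64_t aPTS, int64_t aDuration) {
    std::lock_guard<std::mutex> lock(mLock);
    mEntries.push_back({aPTS, aDuration});
  }

  // Output side: look up the duration for the pts the codec reported.
  // Returns false for a frame whose pts rolled back before the oldest
  // queued entry -- the "invalid pts frame" this table filters out.
  bool QueueOut(int64_t aPTS, int64_t& aDuration) {
    std::lock_guard<std::mutex> lock(mLock);
    // Default duration for frames that drain out of the codec after the
    // table was cleared (e.g. by a seek); such frames are stale anyway.
    aDuration = 1;
    if (!mEntries.empty() && mEntries.front().pts > aPTS) {
      return false;
    }
    for (size_t i = 0; i < mEntries.size(); i++) {
      if (mEntries[i].pts == aPTS) {
        aDuration = mEntries[i].duration;
        // Entries before i belong to frames the codec dropped; discard
        // them together with the matched entry, keeping FIFO order.
        mEntries.erase(mEntries.begin(), mEntries.begin() + i + 1);
        return true;
      }
    }
    return true;
  }

  // Flush side: forget everything queued so far.
  void Clear() {
    std::lock_guard<std::mutex> lock(mLock);
    mEntries.clear();
  }

private:
  std::mutex mLock;
  std::deque<FrameTime> mEntries;
};

int main() {
  FrameTimeTable table;
  table.QueueIn(0, 33);
  table.QueueIn(33, 33);
  table.QueueIn(66, 33);

  int64_t duration = 0;
  // The codec dropped the frames at pts 0 and 33, so the lookup for pts 66
  // discards their entries and still returns the right duration (33).
  if (table.QueueOut(66, duration)) {
    std::printf("pts 66 -> duration %lld\n", static_cast<long long>(duration));
  }
  return 0;
}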
@@ -139,6 +184,10 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
     return NS_ERROR_UNEXPECTED;
   }
 
+  int64_t duration;
+  nsresult rv = QueueFrameTimeOut(timeUs, duration);
+  NS_ENSURE_SUCCESS(rv, rv);
+
   if (mVideoBuffer->range_length() == 0) {
     // Some decoders may return spurious empty buffers that we just want to ignore
     // quoted from Android's AwesomePlayer.cpp
@@ -178,7 +227,7 @@ GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
                              mImageContainer,
                              aStreamOffset,
                              timeUs,
-                             1, // We don't know the duration.
+                             duration,
                              textureClient,
                              keyFrame,
                              -1,
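With the table in place, the duration obtained from QueueFrameTimeOut() now feeds the VideoData constructed here, replacing the hard-coded 1 that the old line annotated with "We don't know the duration."; the state machine downstream therefore gets a real per-frame duration rather than a placeholder.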
@@ -400,6 +449,8 @@ GonkVideoDecoderManager::Input(mp4_demuxer::MP4Sample* aSample)
     mp4_demuxer::AnnexB::ConvertSample(aSample);
     // Forward sample data to the decoder.
 
+    QueueFrameTimeIn(aSample->composition_timestamp, aSample->duration);
+
     const uint8_t* data = reinterpret_cast<const uint8_t*>(aSample->data);
     uint32_t length = aSample->size;
     rv = mDecoder->Input(data, length, aSample->composition_timestamp, 0);
@@ -411,6 +462,19 @@ GonkVideoDecoderManager::Input(mp4_demuxer::MP4Sample* aSample)
   return (rv == OK) ? NS_OK : NS_ERROR_FAILURE;
 }
 
+nsresult
+GonkVideoDecoderManager::Flush()
+{
+  status_t err = mDecoder->flush();
+  if (err != OK) {
+    return NS_ERROR_FAILURE;
+  }
+
+  MonitorAutoLock mon(mMonitor);
+  mFrameTimeInfo.Clear();
+  return NS_OK;
+}
+
 void
 GonkVideoDecoderManager::codecReserved()
 {
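Note the ordering in GonkVideoDecoderManager::Flush(): the MediaCodec is flushed first, and mFrameTimeInfo is cleared under mMonitor only if that succeeds. Combined with the earlier GonkMediaDataDecoder hunk, which now forwards Flush() to the manager, the codec state and the frame-time table stay consistent across a seek; the audio manager's Flush() simply returns NS_OK because it keeps no such table.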
@@ -50,6 +50,8 @@ public:
   virtual nsresult Output(int64_t aStreamOffset,
                           nsRefPtr<MediaData>& aOutput) MOZ_OVERRIDE;
 
+  virtual nsresult Flush() MOZ_OVERRIDE;
+
   virtual void ReleaseMediaResources();
 
   static void RecycleCallback(TextureClient* aClient, void* aClosure);
@@ -104,6 +106,15 @@ private:
   };
   friend class VideoResourceListener;
 
+  // FrameTimeInfo keeps the presentation time stamp (pts) and its duration.
+  // On MediaDecoderStateMachine, it needs pts and duration to display decoded
+  // frame correctly. But OMX can carry one field of time info (kKeyTime) so
+  // we use FrameTimeInfo to keep pts and duration.
+  struct FrameTimeInfo {
+    int64_t pts;       // presentation time stamp of this frame.
+    int64_t duration;  // the playback duration.
+  };
+
   bool SetVideoFormat();
 
   nsresult CreateVideoData(int64_t aStreamOffset, VideoData** aOutData);
@@ -118,6 +129,9 @@ private:
   void ReleaseAllPendingVideoBuffers();
   void PostReleaseVideoBuffer(android::MediaBuffer *aBuffer);
 
+  void QueueFrameTimeIn(int64_t aPTS, int64_t aDuration);
+  nsresult QueueFrameTimeOut(int64_t aPTS, int64_t& aDuration);
+
   uint32_t mVideoWidth;
   uint32_t mVideoHeight;
   uint32_t mDisplayWidth;
@@ -139,6 +153,14 @@ private:
   android::sp<ALooper> mManagerLooper;
   FrameInfo mFrameInfo;
 
+  // It protects mFrameTimeInfo.
+  Monitor mMonitor;
+  // Array of FrameTimeInfo whose corresponding frames are sent to OMX.
+  // Ideally, it is a FIFO. Input() adds the entry to the end element and
+  // CreateVideoData() takes the first entry. However, there are exceptions
+  // due to MediaCodec error or seeking.
+  nsTArray<FrameTimeInfo> mFrameTimeInfo;
+
   // color converter
   android::I420ColorConverterHelper mColorConverter;
   nsAutoArrayPtr<uint8_t> mColorConverterBuffer;
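Every access to mFrameTimeInfo (QueueFrameTimeIn(), QueueFrameTimeOut(), and Flush()) takes a MonitorAutoLock on the mMonitor declared above, presumably because the input path and the codec's output path are not guaranteed to run on the same thread; the standalone sketch after the QueueFrameTimeOut hunk mirrors this with a std::mutex.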