Bug 1151378: Part 1. Remove now-unused code path. r=k17e

libstagefright is now only used to read the metadata.
Jean-Yves Avenard 2015-04-07 20:33:18 +10:00
Parent 3daebe5323
Commit f17c6eb49e
3 changed files with 35 additions and 126 deletions
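As the summary says, libstagefright's extractor survives only as a metadata reader: the demuxer gathers what it needs up front and then serves samples from its own index rather than from the extractor. Below is a minimal, self-contained C++ sketch of that shape; MetadataReader, Sample and Demuxer are illustrative stand-ins, not the Gecko/mp4_demuxer classes.

#include <cstddef>
#include <cstdint>
#include <vector>

struct Sample {
  int64_t decodeTime = 0;
  std::vector<uint8_t> data;
};

// Stand-in for the metadata-only role the container parser now plays.
struct MetadataReader {
  bool Parse() { return true; }          // parse track headers
  bool CanSeek() const { return true; }  // capability flag
  std::vector<Sample> BuildIndex() const { return {Sample{0, {0x00}}}; }
};

class Demuxer {
public:
  bool Init() {
    MetadataReader reader;               // lives only for the duration of Init()
    if (!reader.Parse()) {
      return false;
    }
    mCanSeek = reader.CanSeek();         // cache what we need from the parser
    mIndex = reader.BuildIndex();        // samples come from our own index
    return !mIndex.empty();
  }
  bool CanSeek() const { return mCanSeek; }
  const Sample* DemuxSample() {
    if (mNext >= mIndex.size()) {
      return nullptr;                    // end of stream
    }
    return &mIndex[mNext++];
  }

private:
  std::vector<Sample> mIndex;
  std::size_t mNext = 0;
  bool mCanSeek = false;
};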

View file

@@ -8,7 +8,6 @@
#include "mp4_demuxer/DecoderData.h"
#include <media/stagefright/foundation/ABitReader.h>
#include "media/stagefright/MetaData.h"
#include "media/stagefright/MediaBuffer.h"
#include "media/stagefright/MediaDefs.h"
#include "media/stagefright/Utils.h"
#include "mozilla/ArrayUtils.h"
@@ -198,8 +197,7 @@ VideoDecoderConfig::IsValid()
}
MP4Sample::MP4Sample()
: mMediaBuffer(nullptr)
, decode_timestamp(0)
: decode_timestamp(0)
, composition_timestamp(0)
, duration(0)
, byte_offset(0)
@@ -232,26 +230,6 @@ MP4Sample::Clone() const
MP4Sample::~MP4Sample()
{
if (mMediaBuffer) {
mMediaBuffer->release();
}
}
void
MP4Sample::Update(int64_t& aMediaTime)
{
sp<MetaData> m = mMediaBuffer->meta_data();
// XXXbholley - Why don't we adjust decode_timestamp for aMediaTime?
// According to k17e, this code path is no longer used - we should probably remove it.
decode_timestamp = FindInt64(m, kKeyDecodingTime);
composition_timestamp = FindInt64(m, kKeyTime) - aMediaTime;
duration = FindInt64(m, kKeyDuration);
byte_offset = FindInt64(m, kKey64BitFileOffset);
is_sync_point = FindInt32(m, kKeyIsSyncFrame);
data = reinterpret_cast<uint8_t*>(mMediaBuffer->data());
size = mMediaBuffer->range_length();
crypto.Update(m);
}
bool
@@ -259,25 +237,14 @@ MP4Sample::Pad(size_t aPaddingBytes)
{
size_t newSize = size + aPaddingBytes;
// If the existing MediaBuffer has enough space then we just recycle it. If
// not then we copy to a new buffer.
uint8_t* newData = mMediaBuffer && newSize <= mMediaBuffer->size()
? data
: new (fallible) uint8_t[newSize];
uint8_t* newData = new (fallible) uint8_t[newSize];
if (!newData) {
return false;
}
memset(newData + size, 0, aPaddingBytes);
if (newData != data) {
memcpy(newData, data, size);
extra_buffer = data = newData;
if (mMediaBuffer) {
mMediaBuffer->release();
mMediaBuffer = nullptr;
}
}
memcpy(newData, data, size);
extra_buffer = data = newData;
return true;
}
@@ -287,11 +254,7 @@ MP4Sample::Prepend(const uint8_t* aData, size_t aSize)
{
size_t newSize = size + aSize;
// If the existing MediaBuffer has enough space then we just recycle it. If
// not then we copy to a new buffer.
uint8_t* newData = mMediaBuffer && newSize <= mMediaBuffer->size()
? data
: new (fallible) uint8_t[newSize];
uint8_t* newData = new (fallible) uint8_t[newSize];
if (!newData) {
return false;
}
@@ -299,14 +262,7 @@ MP4Sample::Prepend(const uint8_t* aData, size_t aSize)
memmove(newData + aSize, data, size);
memmove(newData, aData, aSize);
size = newSize;
if (newData != data) {
extra_buffer = data = newData;
if (mMediaBuffer) {
mMediaBuffer->release();
mMediaBuffer = nullptr;
}
}
extra_buffer = data = newData;
return true;
}
@@ -314,25 +270,14 @@ MP4Sample::Prepend(const uint8_t* aData, size_t aSize)
bool
MP4Sample::Replace(const uint8_t* aData, size_t aSize)
{
// If the existing MediaBuffer has enough space then we just recycle it. If
// not then we copy to a new buffer.
uint8_t* newData = mMediaBuffer && aSize <= mMediaBuffer->size()
? data
: new (fallible) uint8_t[aSize];
uint8_t* newData = new (fallible) uint8_t[aSize];
if (!newData) {
return false;
}
memcpy(newData, aData, aSize);
size = aSize;
if (newData != data) {
extra_buffer = data = newData;
if (mMediaBuffer) {
mMediaBuffer->release();
mMediaBuffer = nullptr;
}
}
extra_buffer = data = newData;
return true;
}
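With the MediaBuffer member gone, Pad(), Prepend() and Replace() all collapse to the same pattern: allocate a fresh buffer with the fallible allocator, copy, and repoint data/extra_buffer at it. The following stand-alone sketch shows that pattern in plain C++; std::nothrow stands in for Gecko's fallible new, and SampleBuffer is an illustrative type, not MP4Sample.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <memory>
#include <new>

struct SampleBuffer {
  std::unique_ptr<uint8_t[]> storage;  // owns the bytes (the "extra_buffer" role)
  uint8_t* data = nullptr;             // current payload pointer
  std::size_t size = 0;                // payload length, excluding padding

  bool Pad(std::size_t paddingBytes) {
    std::size_t newSize = size + paddingBytes;
    uint8_t* newData = new (std::nothrow) uint8_t[newSize];
    if (!newData) {
      return false;                    // report allocation failure to the caller
    }
    std::memset(newData + size, 0, paddingBytes);  // zero the padding tail
    if (data) {
      std::memcpy(newData, data, size);            // copy the existing payload first
    }
    storage.reset(newData);            // then take ownership of the new buffer
    data = newData;                    // size stays the same: padding is extra room
    return true;
  }
};

Prepend() and Replace() differ only in the copy step (shift-and-insert versus overwrite); the allocate/copy/swap skeleton is identical.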

View file

@@ -17,7 +17,6 @@ namespace stagefright
{
template <typename T> class sp;
class MetaData;
class MediaBuffer;
}
namespace mp4_demuxer
@@ -190,11 +189,8 @@ public:
MP4Sample();
virtual ~MP4Sample();
MP4Sample* Clone() const;
void Update(int64_t& aMediaTime);
bool Pad(size_t aPaddingBytes);
stagefright::MediaBuffer* mMediaBuffer;
Microseconds decode_timestamp;
Microseconds composition_timestamp;
Microseconds duration;

View file

@@ -22,17 +22,16 @@ namespace mp4_demuxer
struct StageFrightPrivate
{
sp<MPEG4Extractor> mExtractor;
StageFrightPrivate() : mCanSeek(false) {}
sp<MediaSource> mAudio;
MediaSource::ReadOptions mAudioOptions;
nsAutoPtr<SampleIterator> mAudioIterator;
sp<MediaSource> mVideo;
MediaSource::ReadOptions mVideoOptions;
nsAutoPtr<SampleIterator> mVideoIterator;
nsTArray<nsRefPtr<Index>> mIndexes;
bool mCanSeek;
};
class DataSourceAdapter : public DataSource
@@ -79,7 +78,6 @@ MP4Demuxer::MP4Demuxer(Stream* source, Monitor* aMonitor)
, mMonitor(aMonitor)
, mNextKeyframeTime(-1)
{
mPrivate->mExtractor = new MPEG4Extractor(new DataSourceAdapter(source));
}
MP4Demuxer::~MP4Demuxer()
@@ -96,7 +94,7 @@ bool
MP4Demuxer::Init()
{
mMonitor->AssertCurrentThreadOwns();
sp<MediaExtractor> e = mPrivate->mExtractor;
sp<MediaExtractor> e = new MPEG4Extractor(new DataSourceAdapter(mSource));
// Read the number of tracks. If we can't find any, make sure to bail now before
// attempting any new reads to make the retry system work.
@@ -148,6 +146,7 @@ MP4Demuxer::Init()
// No duration was found in either track; use the one from the movie extended header box.
mVideoConfig.duration = mAudioConfig.duration = movieDuration;
}
mPrivate->mCanSeek = e->flags() & MediaExtractor::CAN_SEEK;
return mPrivate->mAudio.get() || mPrivate->mVideo.get();
}
@@ -177,7 +176,7 @@ bool
MP4Demuxer::CanSeek()
{
mMonitor->AssertCurrentThreadOwns();
return mPrivate->mExtractor->flags() & MediaExtractor::CAN_SEEK;
return mPrivate->mCanSeek;
}
void
@@ -186,9 +185,6 @@ MP4Demuxer::SeekAudio(Microseconds aTime)
mMonitor->AssertCurrentThreadOwns();
if (mPrivate->mAudioIterator) {
mPrivate->mAudioIterator->Seek(aTime);
} else {
mPrivate->mAudioOptions.setSeekTo(
aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
}
}
@@ -198,9 +194,6 @@ MP4Demuxer::SeekVideo(Microseconds aTime)
mMonitor->AssertCurrentThreadOwns();
if (mPrivate->mVideoIterator) {
mPrivate->mVideoIterator->Seek(aTime);
} else {
mPrivate->mVideoOptions.setSeekTo(
aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
}
}
@@ -208,29 +201,17 @@ MP4Sample*
MP4Demuxer::DemuxAudioSample()
{
mMonitor->AssertCurrentThreadOwns();
if (mPrivate->mAudioIterator) {
nsAutoPtr<MP4Sample> sample(mPrivate->mAudioIterator->GetNext());
if (sample) {
if (sample->crypto.valid) {
sample->crypto.mode = mAudioConfig.crypto.mode;
sample->crypto.iv_size = mAudioConfig.crypto.iv_size;
sample->crypto.key.AppendElements(mAudioConfig.crypto.key);
}
}
return sample.forget();
}
nsAutoPtr<MP4Sample> sample(new MP4Sample());
status_t status =
mPrivate->mAudio->read(&sample->mMediaBuffer, &mPrivate->mAudioOptions);
mPrivate->mAudioOptions.clearSeekTo();
if (status < 0) {
if (!mPrivate->mAudioIterator) {
return nullptr;
}
sample->Update(mAudioConfig.media_time);
nsAutoPtr<MP4Sample> sample(mPrivate->mAudioIterator->GetNext());
if (sample) {
if (sample->crypto.valid) {
sample->crypto.mode = mAudioConfig.crypto.mode;
sample->crypto.iv_size = mAudioConfig.crypto.iv_size;
sample->crypto.key.AppendElements(mAudioConfig.crypto.key);
}
}
return sample.forget();
}
@@ -238,33 +219,20 @@ MP4Sample*
MP4Demuxer::DemuxVideoSample()
{
mMonitor->AssertCurrentThreadOwns();
if (mPrivate->mVideoIterator) {
nsAutoPtr<MP4Sample> sample(mPrivate->mVideoIterator->GetNext());
if (sample) {
sample->extra_data = mVideoConfig.extra_data;
if (sample->crypto.valid) {
sample->crypto.mode = mVideoConfig.crypto.mode;
sample->crypto.key.AppendElements(mVideoConfig.crypto.key);
}
if (sample->composition_timestamp >= mNextKeyframeTime) {
mNextKeyframeTime = mPrivate->mVideoIterator->GetNextKeyframeTime();
}
}
return sample.forget();
}
nsAutoPtr<MP4Sample> sample(new MP4Sample());
status_t status =
mPrivate->mVideo->read(&sample->mMediaBuffer, &mPrivate->mVideoOptions);
mPrivate->mVideoOptions.clearSeekTo();
if (status < 0) {
if (!mPrivate->mVideoIterator) {
return nullptr;
}
sample->Update(mVideoConfig.media_time);
sample->extra_data = mVideoConfig.extra_data;
nsAutoPtr<MP4Sample> sample(mPrivate->mVideoIterator->GetNext());
if (sample) {
sample->extra_data = mVideoConfig.extra_data;
if (sample->crypto.valid) {
sample->crypto.mode = mVideoConfig.crypto.mode;
sample->crypto.key.AppendElements(mVideoConfig.crypto.key);
}
if (sample->composition_timestamp >= mNextKeyframeTime) {
mNextKeyframeTime = mPrivate->mVideoIterator->GetNextKeyframeTime();
}
}
return sample.forget();
}
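After this change DemuxAudioSample() and DemuxVideoSample() have a single path: bail out if the track has no SampleIterator, otherwise pull the next sample, copy the per-track crypto/extra_data onto it, and return null at end of stream. A rough, self-contained C++ sketch of that pull model follows; TrackIterator, DemuxedSample, TrackConfig and DemuxNext are placeholders for illustration, not the mp4_demuxer API.

#include <cstddef>
#include <cstdint>
#include <optional>
#include <utility>
#include <vector>

struct DemuxedSample {
  int64_t compositionTime = 0;
  bool encrypted = false;
  int cryptoMode = 0;                     // filled in from the track config
  std::vector<uint8_t> extraData;         // e.g. codec-specific config
  std::vector<uint8_t> data;
};

// Index-backed iterator: hands out samples in order, then signals end of stream.
class TrackIterator {
public:
  explicit TrackIterator(std::vector<DemuxedSample> aSamples)
    : mSamples(std::move(aSamples)) {}

  std::optional<DemuxedSample> GetNext() {
    if (mNext >= mSamples.size()) {
      return std::nullopt;                // index exhausted
    }
    return mSamples[mNext++];
  }

private:
  std::vector<DemuxedSample> mSamples;
  std::size_t mNext = 0;
};

struct TrackConfig {
  int cryptoMode = 0;
  std::vector<uint8_t> extraData;
};

// Mirrors the shape of DemuxVideoSample() after the patch: no iterator means
// no track; otherwise fetch the sample and decorate it from the track config.
std::optional<DemuxedSample> DemuxNext(TrackIterator* aIterator,
                                       const TrackConfig& aConfig) {
  if (!aIterator) {
    return std::nullopt;
  }
  auto sample = aIterator->GetNext();
  if (sample) {
    sample->extraData = aConfig.extraData;
    if (sample->encrypted) {
      sample->cryptoMode = aConfig.cryptoMode;
    }
  }
  return sample;
}

The keyframe bookkeeping is omitted here; in the real demuxer, mNextKeyframeTime is refreshed from the iterator whenever a returned sample's composition timestamp reaches the current keyframe time.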