Bug 1098126 - Use MoofParser to read fragmented MP4 data; r=mattwoodrow

This commit is contained in:
Anthony Jones 2014-12-11 15:29:37 +13:00
Parent 87edf66046
Commit a48fab1845
6 changed files: 157 additions and 15 deletions

View file

@ -75,8 +75,94 @@ RangeFinder::Contains(MediaByteRange aByteRange)
return false;
}
// Start iterating aIndex's fragments from the first sample of the first moof.
SampleIterator::SampleIterator(Index* aIndex)
  : mIndex(aIndex), mCurrentMoof(0), mCurrentSample(0)
{
}
// Returns the next sample with its payload read from the stream, or null
// when iteration is exhausted. Caller takes ownership of the returned sample.
MP4Sample* SampleIterator::GetNext()
{
  // Build the sample's metadata (offset, size, timestamps) from the index.
  nsAutoPtr<MP4Sample> sample(Get());
  if (!sample) {
    return nullptr;
  }

  // Do the blocking read into a buffer owned by the sample.
  sample->data = sample->extra_buffer = new uint8_t[sample->size];

  // Initialize to 0 so a read that fails without writing the out-param
  // reports an empty sample instead of propagating an uninitialized size.
  size_t bytesRead = 0;
  mIndex->mSource->ReadAt(sample->byte_offset, sample->data, sample->size,
                          &bytesRead);

  // Return whatever we actually got so a short read surfaces to the caller
  // as a truncated sample rather than being silently padded.
  sample->size = bytesRead;

  Next();

  return sample.forget();
}
// Builds the next sample's metadata from the moof index without reading its
// payload. Returns null at end of stream or when there is no MoofParser.
MP4Sample* SampleIterator::Get()
{
  if (!mIndex->mMoofParser) {
    // Not a fragmented file; nothing for this iterator to walk.
    return nullptr;
  }

  nsTArray<Moof>& moofs = mIndex->mMoofParser->mMoofs;
  // Skip forward past any moofs whose sample tables we have exhausted.
  while (mCurrentMoof < moofs.Length() &&
         mCurrentSample >= moofs[mCurrentMoof].mIndex.Length()) {
    mCurrentSample = 0;
    ++mCurrentMoof;
  }
  if (mCurrentMoof >= moofs.Length()) {
    return nullptr;
  }

  Sample& entry = moofs[mCurrentMoof].mIndex[mCurrentSample];
  nsAutoPtr<MP4Sample> out(new MP4Sample());
  out->decode_timestamp = entry.mDecodeTime;
  out->composition_timestamp = entry.mCompositionRange.start;
  out->duration = entry.mCompositionRange.end - entry.mCompositionRange.start;
  out->byte_offset = entry.mByteRange.mStart;
  out->is_sync_point = entry.mSync;
  out->size = entry.mByteRange.mEnd - entry.mByteRange.mStart;
  return out.forget();
}
// Step to the following sample; Get() takes care of crossing moof boundaries.
void SampleIterator::Next()
{
  mCurrentSample += 1;
}
// Positions the iterator at the last sync sample whose composition timestamp
// is at or before aTime, so decoding can restart on a keyframe.
void SampleIterator::Seek(Microseconds aTime)
{
  size_t syncMoof = 0;
  size_t syncSample = 0;
  mCurrentMoof = 0;
  mCurrentSample = 0;
  while (true) {
    nsAutoPtr<MP4Sample> sample(Get());
    if (!sample) {
      // Seek target is at or past the end of the indexed samples; fall back
      // to the last sync point seen instead of dereferencing null.
      break;
    }
    if (sample->composition_timestamp > aTime) {
      break;
    }
    if (sample->is_sync_point) {
      syncMoof = mCurrentMoof;
      syncSample = mCurrentSample;
    }
    Next();
  }
  mCurrentMoof = syncMoof;
  mCurrentSample = syncSample;
}
Index::Index(const stagefright::Vector<MediaSource::Indice>& aIndex,
Stream* aSource, uint32_t aTrackId)
: mSource(aSource)
{
if (aIndex.isEmpty()) {
mMoofParser = new MoofParser(aSource, aTrackId);

View file

@ -210,6 +210,7 @@ Moof::ParseTrun(Box& aBox, Tfhd& aTfhd, Tfdt& aTfdt, Mdhd& aMdhd, Edts& aEdts)
sample.mByteRange = MediaByteRange(offset, offset + sampleSize);
offset += sampleSize;
sample.mDecodeTime = decodeTime;
sample.mCompositionRange = Interval<Microseconds>(
aMdhd.ToMicroseconds(decodeTime + ctsOffset - aEdts.mMediaStart),
aMdhd.ToMicroseconds(decodeTime + ctsOffset + sampleDuration - aEdts.mMediaStart));

View file

@ -165,7 +165,6 @@ public:
void Prepend(const uint8_t* aData, size_t aSize);
private:
nsAutoArrayPtr<uint8_t> extra_buffer;
};
}

View file

@ -14,13 +14,30 @@ namespace mp4_demuxer
template <typename T> class Interval;
class MoofParser;
class Sample;
class Index;
// Walks the samples described by an Index's MoofParser, yielding one
// MP4Sample at a time in table order.
class SampleIterator
{
public:
SampleIterator(Index* aIndex);
// Returns the next sample (caller takes ownership) with its data read from
// the stream, or null when there are no more samples.
MP4Sample* GetNext();
// Moves the iterator to the last sync sample whose composition timestamp
// is at or before aTime.
void Seek(Microseconds aTime);
private:
// Builds the next sample's metadata without reading its payload; null when
// exhausted or when the index has no MoofParser.
MP4Sample* Get();
// Advances to the next sample.
void Next();
// Refcounted so the Index (and its MoofParser) stays alive while iterating.
nsRefPtr<Index> mIndex;
size_t mCurrentMoof; // index into the MoofParser's mMoofs array
size_t mCurrentSample; // index into the current moof's sample table
};
class Index
{
public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Index)
Index(const stagefright::Vector<stagefright::MediaSource::Indice>& aIndex,
Stream* aSource, uint32_t aTrackId);
~Index();
void UpdateMoofIndex(const nsTArray<mozilla::MediaByteRange>& aByteRanges);
Microseconds GetEndCompositionIfBuffered(
@ -29,8 +46,14 @@ public:
const nsTArray<mozilla::MediaByteRange>& aByteRanges,
nsTArray<Interval<Microseconds>>* aTimeRanges);
uint64_t GetEvictionOffset(Microseconds aTime);
bool IsFragmented() { return mMoofParser; }
friend class SampleIterator;
private:
~Index();
Stream* mSource;
nsTArray<Sample> mIndex;
nsAutoPtr<MoofParser> mMoofParser;
};

View file

@ -108,6 +108,7 @@ public:
// Per-sample index entry describing where a sample lives in the stream and
// when it should be decoded/presented.
struct Sample
{
mozilla::MediaByteRange mByteRange; // byte extent of the sample's data in the stream
Microseconds mDecodeTime; // decode timestamp; NOTE(review): ParseTrun assigns the raw trun decode time — confirm it is converted to microseconds
Interval<Microseconds> mCompositionRange; // [start, end) composition time, in microseconds
bool mSync; // true if this sample is a sync point (safe decode start)
};

View file

@ -26,11 +26,13 @@ struct StageFrightPrivate
sp<MediaSource> mAudio;
MediaSource::ReadOptions mAudioOptions;
nsAutoPtr<SampleIterator> mAudioIterator;
sp<MediaSource> mVideo;
MediaSource::ReadOptions mVideoOptions;
nsAutoPtr<SampleIterator> mVideoIterator;
nsTArray<nsAutoPtr<Index>> mIndexes;
nsTArray<nsRefPtr<Index>> mIndexes;
};
class DataSourceAdapter : public DataSource
@ -100,21 +102,31 @@ MP4Demuxer::Init()
}
if (!mPrivate->mAudio.get() && !strncmp(mimeType, "audio/", 6)) {
mPrivate->mAudio = e->getTrack(i);
if (mPrivate->mAudio->start() != OK) {
sp<MediaSource> track = e->getTrack(i);
if (track->start() != OK) {
return false;
}
mPrivate->mAudio = track;
mAudioConfig.Update(metaData, mimeType);
mPrivate->mIndexes.AppendElement(new Index(
mPrivate->mAudio->exportIndex(), mSource, mAudioConfig.mTrackId));
nsRefPtr<Index> index = new Index(mPrivate->mAudio->exportIndex(),
mSource, mAudioConfig.mTrackId);
mPrivate->mIndexes.AppendElement(index);
if (index->IsFragmented()) {
mPrivate->mAudioIterator = new SampleIterator(index);
}
} else if (!mPrivate->mVideo.get() && !strncmp(mimeType, "video/", 6)) {
mPrivate->mVideo = e->getTrack(i);
if (mPrivate->mVideo->start() != OK) {
sp<MediaSource> track = e->getTrack(i);
if (track->start() != OK) {
return false;
}
mPrivate->mVideo = track;
mVideoConfig.Update(metaData, mimeType);
mPrivate->mIndexes.AppendElement(new Index(
mPrivate->mVideo->exportIndex(), mSource, mVideoConfig.mTrackId));
nsRefPtr<Index> index = new Index(mPrivate->mVideo->exportIndex(),
mSource, mVideoConfig.mTrackId);
mPrivate->mIndexes.AppendElement(index);
if (index->IsFragmented()) {
mPrivate->mVideoIterator = new SampleIterator(index);
}
}
}
sp<MetaData> metaData = e->getMetaData();
@ -150,20 +162,32 @@ MP4Demuxer::CanSeek()
void
MP4Demuxer::SeekAudio(Microseconds aTime)
{
mPrivate->mAudioOptions.setSeekTo(
aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
if (mPrivate->mAudioIterator) {
mPrivate->mAudioIterator->Seek(aTime);
} else {
mPrivate->mAudioOptions.setSeekTo(
aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
}
}
void
MP4Demuxer::SeekVideo(Microseconds aTime)
{
mPrivate->mVideoOptions.setSeekTo(
aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
if (mPrivate->mVideoIterator) {
mPrivate->mVideoIterator->Seek(aTime);
} else {
mPrivate->mVideoOptions.setSeekTo(
aTime, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
}
}
MP4Sample*
MP4Demuxer::DemuxAudioSample()
{
if (mPrivate->mAudioIterator) {
return mPrivate->mAudioIterator->GetNext();
}
nsAutoPtr<MP4Sample> sample(new MP4Sample());
status_t status =
mPrivate->mAudio->read(&sample->mMediaBuffer, &mPrivate->mAudioOptions);
@ -181,6 +205,14 @@ MP4Demuxer::DemuxAudioSample()
MP4Sample*
MP4Demuxer::DemuxVideoSample()
{
if (mPrivate->mVideoIterator) {
nsAutoPtr<MP4Sample> sample(mPrivate->mVideoIterator->GetNext());
if (sample) {
sample->prefix_data = mVideoConfig.annex_b;
}
return sample.forget();
}
nsAutoPtr<MP4Sample> sample(new MP4Sample());
status_t status =
mPrivate->mVideo->read(&sample->mMediaBuffer, &mPrivate->mVideoOptions);