Bug 1266644 - Rename StreamBuffer to StreamTracks. r=jesup r=pehrsons

Rename StreamBuffer to StreamTracks. We still need a place to keep the track information in every MediaStream, even if StreamBuffer::Track::mSegment is empty.

--HG--
rename : dom/media/StreamBuffer.cpp => StreamTracks.cpp
rename : dom/media/StreamBuffer.h => StreamTracks.h
ctai 2016-01-26 10:49:01 +08:00
Parent 2f90ac66c5
Commit 8fcb64e480
29 changed files with 566 additions and 106 deletions

View file

@@ -71,7 +71,7 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
}
uint32_t inputCount = mInputs.Length();
-StreamBuffer::Track* track = EnsureTrack(mTrackId);
+StreamTracks::Track* track = EnsureTrack(mTrackId);
// Notify the DOM everything is in order.
if (!mTrackCreated) {
for (uint32_t i = 0; i < mListeners.Length(); i++) {
@@ -101,7 +101,7 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
AudioSegment output;
for (uint32_t i = 0; i < inputCount; i++) {
MediaStream* s = mInputs[i]->GetSource();
-StreamBuffer::TrackIter tracks(s->GetStreamBuffer(), MediaSegment::AUDIO);
+StreamTracks::TrackIter tracks(s->GetStreamTracks(), MediaSegment::AUDIO);
while (!tracks.IsEnded()) {
AudioSegment* inputSegment = tracks->Get<AudioSegment>();
StreamTime inputStart = s->GraphTimeToStreamTimeWithBlocking(aFrom);
@@ -121,7 +121,7 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
}
// Regardless of the status of the input tracks, we go foward.
-mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking((aTo)));
+mTracks.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking((aTo)));
}
void

View file

@@ -8,7 +8,7 @@
#include "MediaStreamGraph.h"
#include "AudioMixer.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include <algorithm>
namespace mozilla

View file

@@ -8,7 +8,7 @@
#include "DOMMediaStream.h"
#include "mozilla/dom/HTMLCanvasElement.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
class nsIPrincipal;

View file

@@ -1443,7 +1443,7 @@ DOMHwMediaStream::SetImageSize(uint32_t width, uint32_t height)
#endif
SourceMediaStream* srcStream = GetInputStream()->AsSourceStream();
-StreamBuffer::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
+StreamTracks::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
if (!track || !track->GetSegment()) {
return;
@@ -1479,7 +1479,7 @@ DOMHwMediaStream::SetOverlayImage(OverlayImage* aImage)
#endif
SourceMediaStream* srcStream = GetInputStream()->AsSourceStream();
-StreamBuffer::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
+StreamTracks::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
if (!track || !track->GetSegment()) {
return;

View file

@@ -10,7 +10,7 @@
#include "nsCycleCollectionParticipant.h"
#include "nsWrapperCache.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "nsIDOMWindow.h"
#include "nsIPrincipal.h"
#include "mozilla/DOMEventTargetHelper.h"

View file

@@ -14,7 +14,7 @@
#include "nsTArray.h"
#include "ImageTypes.h"
#include "MediaData.h"
-#include "StreamBuffer.h" // for TrackID
+#include "StreamTracks.h" // for TrackID
#include "TimeUnits.h"
namespace mozilla {

View file

@@ -45,7 +45,7 @@ typedef int64_t MediaTime;
const int64_t MEDIA_TIME_MAX = TRACK_TICKS_MAX;
/**
-* Media time relative to the start of a StreamBuffer.
+* Media time relative to the start of a StreamTracks.
*/
typedef MediaTime StreamTime;
const StreamTime STREAM_TIME_MAX = MEDIA_TIME_MAX;

View file

@@ -81,7 +81,7 @@ MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
return;
STREAM_LOG(LogLevel::Debug, ("MediaStream %p will finish", aStream));
#ifdef DEBUG
-for (StreamBuffer::TrackIter track(aStream->mBuffer);
+for (StreamTracks::TrackIter track(aStream->mTracks);
!track.IsEnded(); track.Next()) {
if (!track->IsEnded()) {
STREAM_LOG(LogLevel::Error,
@@ -92,7 +92,7 @@ MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
}
#endif
aStream->mFinished = true;
-aStream->mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
+aStream->mTracks.AdvanceKnownTracksTime(STREAM_TIME_MAX);
SetStreamOrderDirty();
}
@@ -100,7 +100,7 @@ MediaStreamGraphImpl::FinishStream(MediaStream* aStream)
void
MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
{
-aStream->mBufferStartTime = mProcessedTime;
+aStream->mTracksStartTime = mProcessedTime;
if (aStream->IsSuspended()) {
mSuspendedStreams.AppendElement(aStream);
STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph, in the suspended stream array", aStream));
@@ -160,14 +160,14 @@ MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
StreamTime t = aStream->GraphTimeToStreamTime(aDesiredUpToTime);
STREAM_LOG(LogLevel::Verbose, ("Calling NotifyPull aStream=%p t=%f current end=%f", aStream,
MediaTimeToSeconds(t),
-MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
-if (t > aStream->mBuffer.GetEnd()) {
+MediaTimeToSeconds(aStream->mTracks.GetEnd())));
+if (t > aStream->mTracks.GetEnd()) {
*aEnsureNextIteration = true;
#ifdef DEBUG
if (aStream->mListeners.Length() == 0) {
STREAM_LOG(LogLevel::Error, ("No listeners in NotifyPull aStream=%p desired=%f current end=%f",
aStream, MediaTimeToSeconds(t),
-MediaTimeToSeconds(aStream->mBuffer.GetEnd())));
+MediaTimeToSeconds(aStream->mTracks.GetEnd())));
aStream->DumpTrackInfo();
}
#endif
@@ -186,7 +186,7 @@ MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
SourceMediaStream::TrackData* data = &aStream->mUpdateTracks[i];
aStream->ApplyTrackDisabling(data->mID, data->mData);
StreamTime offset = (data->mCommands & SourceMediaStream::TRACK_CREATE)
-? data->mStart : aStream->mBuffer.FindTrack(data->mID)->GetSegment()->GetDuration();
+? data->mStart : aStream->mTracks.FindTrack(data->mID)->GetSegment()->GetDuration();
for (MediaStreamListener* l : aStream->mListeners) {
l->NotifyQueuedTrackChanges(this, data->mID,
offset, data->mCommands, *data->mData);
@@ -207,14 +207,14 @@ MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
int64_t(segment->GetDuration())));
data->mEndOfFlushedData += segment->GetDuration();
-aStream->mBuffer.AddTrack(data->mID, data->mStart, segment);
+aStream->mTracks.AddTrack(data->mID, data->mStart, segment);
// The track has taken ownership of data->mData, so let's replace
// data->mData with an empty clone.
data->mData = segment->CreateEmptyClone();
data->mCommands &= ~SourceMediaStream::TRACK_CREATE;
notifiedTrackCreated = true;
} else if (data->mData->GetDuration() > 0) {
-MediaSegment* dest = aStream->mBuffer.FindTrack(data->mID)->GetSegment();
+MediaSegment* dest = aStream->mTracks.FindTrack(data->mID)->GetSegment();
STREAM_LOG(LogLevel::Verbose, ("SourceMediaStream %p track %d, advancing end from %lld to %lld",
aStream, data->mID,
int64_t(dest->GetDuration()),
@@ -223,7 +223,7 @@ MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
dest->AppendFrom(data->mData);
}
if (data->mCommands & SourceMediaStream::TRACK_END) {
-aStream->mBuffer.FindTrack(data->mID)->SetEnded();
+aStream->mTracks.FindTrack(data->mID)->SetEnded();
aStream->mUpdateTracks.RemoveElementAt(i);
}
}
@@ -233,10 +233,10 @@ MediaStreamGraphImpl::ExtractPendingInput(SourceMediaStream* aStream,
}
}
if (!aStream->mFinished) {
-aStream->mBuffer.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
+aStream->mTracks.AdvanceKnownTracksTime(aStream->mUpdateKnownTracksTime);
}
}
-if (aStream->mBuffer.GetEnd() > 0) {
+if (aStream->mTracks.GetEnd() > 0) {
aStream->mHasCurrentData = true;
}
if (finished) {
@@ -251,7 +251,7 @@ MediaStreamGraphImpl::GraphTimeToStreamTimeWithBlocking(MediaStream* aStream,
MOZ_ASSERT(aTime <= mStateComputedTime,
"Don't ask about times where we haven't made blocking decisions yet");
return std::max<StreamTime>(0,
-std::min(aTime, aStream->mStartBlocking) - aStream->mBufferStartTime);
+std::min(aTime, aStream->mStartBlocking) - aStream->mTracksStartTime);
}
GraphTime
@@ -274,7 +274,7 @@ MediaStreamGraphImpl::UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime)
blockedTime);
STREAM_LOG(LogLevel::Verbose,
("MediaStream %p bufferStartTime=%f blockedTime=%f", stream,
-MediaTimeToSeconds(stream->mBufferStartTime),
+MediaTimeToSeconds(stream->mTracksStartTime),
MediaTimeToSeconds(blockedTime)));
stream->mStartBlocking = mStateComputedTime;
@@ -306,7 +306,7 @@ MediaStreamGraphImpl::UpdateCurrentTimeForStreams(GraphTime aPrevCurrentTime)
// out.
if (stream->mFinished && !stream->mNotifiedFinished &&
mProcessedTime >=
-stream->StreamTimeToGraphTime(stream->GetStreamBuffer().GetAllTracksEnd())) {
+stream->StreamTimeToGraphTime(stream->GetStreamTracks().GetAllTracksEnd())) {
stream->mNotifiedFinished = true;
SetStreamOrderDirty();
for (uint32_t j = 0; j < stream->mListeners.Length(); ++j) {
@@ -362,7 +362,7 @@ MediaStreamGraphImpl::ProcessChunkMetadata(GraphTime aPrevCurrentTime)
for (MediaStream* stream : AllStreams()) {
StreamTime iterationStart = stream->GraphTimeToStreamTime(aPrevCurrentTime);
StreamTime iterationEnd = stream->GraphTimeToStreamTime(mProcessedTime);
-for (StreamBuffer::TrackIter tracks(stream->mBuffer);
+for (StreamTracks::TrackIter tracks(stream->mTracks);
!tracks.IsEnded(); tracks.Next()) {
MediaSegment* segment = tracks->GetSegment();
if (!segment) {
@@ -396,13 +396,13 @@ MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream,
// This stream isn't finished or suspended. We don't need to call
// StreamTimeToGraphTime since an underrun is the only thing that can block
// it.
-GraphTime bufferEnd = aStream->GetBufferEnd() + aStream->mBufferStartTime;
+GraphTime bufferEnd = aStream->GetTracksEnd() + aStream->mTracksStartTime;
#ifdef DEBUG
if (bufferEnd < mProcessedTime) {
STREAM_LOG(LogLevel::Error, ("MediaStream %p underrun, "
"bufferEnd %f < mProcessedTime %f (%lld < %lld), Streamtime %lld",
aStream, MediaTimeToSeconds(bufferEnd), MediaTimeToSeconds(mProcessedTime),
-bufferEnd, mProcessedTime, aStream->GetBufferEnd()));
+bufferEnd, mProcessedTime, aStream->GetTracksEnd()));
aStream->DumpTrackInfo();
NS_ASSERTION(bufferEnd >= mProcessedTime, "Buffer underran");
}
@@ -435,7 +435,7 @@ MediaStreamGraphImpl::AudioTrackPresent(bool& aNeedsAEC)
if (stream->AsAudioNodeStream()) {
audioTrackPresent = true;
} else {
-for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer(), MediaSegment::AUDIO);
+for (StreamTracks::TrackIter tracks(stream->GetStreamTracks(), MediaSegment::AUDIO);
!tracks.IsEnded(); tracks.Next()) {
audioTrackPresent = true;
}
@@ -729,7 +729,7 @@ MediaStreamGraphImpl::CreateOrDestroyAudioStreams(MediaStream* aStream)
return;
}
-if (!aStream->GetStreamBuffer().GetAndResetTracksDirty() &&
+if (!aStream->GetStreamTracks().GetAndResetTracksDirty() &&
!aStream->mAudioOutputStreams.IsEmpty()) {
return;
}
@@ -741,7 +741,7 @@ MediaStreamGraphImpl::CreateOrDestroyAudioStreams(MediaStream* aStream)
audioOutputStreamsFound.AppendElement(false);
}
-for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::AUDIO);
+for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(), MediaSegment::AUDIO);
!tracks.IsEnded(); tracks.Next()) {
uint32_t i;
for (i = 0; i < audioOutputStreamsFound.Length(); ++i) {
@@ -800,13 +800,13 @@ MediaStreamGraphImpl::PlayAudio(MediaStream* aStream)
ticksWritten = 0;
MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
-StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
+StreamTracks::Track* track = aStream->mTracks.FindTrack(audioOutput.mTrackID);
AudioSegment* audio = track->Get<AudioSegment>();
AudioSegment output;
StreamTime offset = aStream->GraphTimeToStreamTime(mProcessedTime);
-// We don't update aStream->mBufferStartTime here to account for time spent
+// We don't update aStream->mTracksStartTime here to account for time spent
// blocked. Instead, we'll update it in UpdateCurrentTimeForStreams after
// the blocked period has completed. But we do need to make sure we play
// from the right offsets in the stream buffer, even if we've already
@@ -925,7 +925,7 @@ MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
PrincipalHandle lastPrincipalHandle = PRINCIPAL_HANDLE_NONE;
RefPtr<Image> blackImage;
-MOZ_ASSERT(mProcessedTime >= aStream->mBufferStartTime, "frame position before buffer?");
+MOZ_ASSERT(mProcessedTime >= aStream->mTracksStartTime, "frame position before buffer?");
// We only look at the non-blocking interval
StreamTime frameBufferTime = aStream->GraphTimeToStreamTime(mProcessedTime);
StreamTime bufferEndTime = aStream->GraphTimeToStreamTime(aStream->mStartBlocking);
@@ -937,7 +937,7 @@ MediaStreamGraphImpl::PlayVideo(MediaStream* aStream)
// Pick the last track that has a video chunk for the time, and
// schedule its frame.
chunk = nullptr;
-for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(),
+for (StreamTracks::TrackIter tracks(aStream->GetStreamTracks(),
MediaSegment::VIDEO);
!tracks.IsEnded();
tracks.Next()) {
@@ -1368,14 +1368,14 @@ MediaStreamGraphImpl::UpdateGraph(GraphTime aEndBlockingDecisions)
// The stream's not suspended, and since it's finished, underruns won't
// stop it playing out. So there's no blocking other than what we impose
// here.
-GraphTime endTime = stream->GetStreamBuffer().GetAllTracksEnd() +
-stream->mBufferStartTime;
+GraphTime endTime = stream->GetStreamTracks().GetAllTracksEnd() +
+stream->mTracksStartTime;
if (endTime <= mStateComputedTime) {
STREAM_LOG(LogLevel::Verbose, ("MediaStream %p is blocked due to being finished", stream));
stream->mStartBlocking = mStateComputedTime;
} else {
STREAM_LOG(LogLevel::Verbose, ("MediaStream %p is finished, but not blocked yet (end at %f, with blocking at %f)",
-stream, MediaTimeToSeconds(stream->GetBufferEnd()),
+stream, MediaTimeToSeconds(stream->GetTracksEnd()),
MediaTimeToSeconds(endTime)));
// Data can't be added to a finished stream, so underruns are irrelevant.
stream->mStartBlocking = std::min(endTime, aEndBlockingDecisions);
@@ -1439,7 +1439,7 @@ MediaStreamGraphImpl::Process()
} else {
ps->ProcessInput(mProcessedTime, mStateComputedTime,
ProcessedMediaStream::ALLOW_FINISH);
-NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
+NS_WARN_IF_FALSE(stream->mTracks.GetEnd() >=
GraphTimeToStreamTimeWithBlocking(stream, mStateComputedTime),
"Stream did not produce enough data");
}
@@ -1922,7 +1922,7 @@ MediaStreamGraphImpl::AppendMessage(UniquePtr<ControlMessage> aMessage)
}
MediaStream::MediaStream(DOMMediaStream* aWrapper)
-: mBufferStartTime(0)
+: mTracksStartTime(0)
, mStartBlocking(GRAPH_TIME_MAX)
, mSuspendedCount(0)
, mFinished(false)
@@ -1962,7 +1962,7 @@ MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
// - mListeners - elements
// - mAudioOutputStream - elements
-amount += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
+amount += mTracks.SizeOfExcludingThis(aMallocSizeOf);
amount += mAudioOutputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mVideoOutputs.ShallowSizeOfExcludingThis(aMallocSizeOf);
amount += mListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
@@ -1997,7 +1997,7 @@ MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph)
MOZ_ASSERT(!mGraph, "Should only be called once");
mGraph = aGraph;
mAudioChannelType = aGraph->AudioChannel();
-mBuffer.InitGraphRate(aGraph->GraphRate());
+mTracks.InitGraphRate(aGraph->GraphRate());
}
void
@@ -2013,16 +2013,16 @@ MediaStream::GraphTimeToStreamTime(GraphTime aTime)
NS_ASSERTION(mStartBlocking == GraphImpl()->mStateComputedTime ||
aTime <= mStartBlocking,
"Incorrectly ignoring blocking!");
-return aTime - mBufferStartTime;
+return aTime - mTracksStartTime;
}
GraphTime
MediaStream::StreamTimeToGraphTime(StreamTime aTime)
{
NS_ASSERTION(mStartBlocking == GraphImpl()->mStateComputedTime ||
-aTime + mBufferStartTime <= mStartBlocking,
+aTime + mTracksStartTime <= mStartBlocking,
"Incorrectly ignoring blocking!");
-return aTime + mBufferStartTime;
+return aTime + mTracksStartTime;
}
StreamTime
@@ -2037,16 +2037,16 @@ MediaStream::FinishOnGraphThread()
GraphImpl()->FinishStream(this);
}
-StreamBuffer::Track*
+StreamTracks::Track*
MediaStream::FindTrack(TrackID aID)
{
-return mBuffer.FindTrack(aID);
+return mTracks.FindTrack(aID);
}
-StreamBuffer::Track*
+StreamTracks::Track*
MediaStream::EnsureTrack(TrackID aTrackId)
{
-StreamBuffer::Track* track = mBuffer.FindTrack(aTrackId);
+StreamTracks::Track* track = mTracks.FindTrack(aTrackId);
if (!track) {
nsAutoPtr<MediaSegment> segment(new AudioSegment());
for (uint32_t j = 0; j < mListeners.Length(); ++j) {
@@ -2058,7 +2058,7 @@ MediaStream::EnsureTrack(TrackID aTrackId)
// change this.
l->NotifyFinishedTrackCreation(Graph());
}
-track = &mBuffer.AddTrack(aTrackId, 0, segment.forget());
+track = &mTracks.AddTrack(aTrackId, 0, segment.forget());
}
return track;
}
@@ -2374,7 +2374,7 @@ MediaStream::AddTrackListenerImpl(already_AddRefed<MediaStreamTrackListener> aLi
l->mListener = aListener;
l->mTrackID = aTrackID;
-StreamBuffer::Track* track = FindTrack(aTrackID);
+StreamTracks::Track* track = FindTrack(aTrackID);
if (!track) {
return;
}

View file

@@ -16,7 +16,7 @@
#include "AudioStream.h"
#include "nsTArray.h"
#include "nsIRunnable.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "VideoFrameContainer.h"
#include "VideoSegment.h"
#include "MainThreadUtils.h"
@@ -477,7 +477,7 @@ struct TrackBound
* time. To ensure video plays in sync with audio, make sure that the same
* stream is playing both the audio and video.
*
-* The data in a stream is managed by StreamBuffer. It consists of a set of
+* The data in a stream is managed by StreamTracks. It consists of a set of
* tracks of various types that can start and end over time.
*
* Streams are explicitly managed. The client creates them via
@@ -541,7 +541,7 @@ public:
/**
* Returns sample rate of the graph.
*/
-TrackRate GraphRate() { return mBuffer.GraphRate(); }
+TrackRate GraphRate() { return mTracks.GraphRate(); }
// Control API.
// Since a stream can be played multiple ways, we need to combine independent
@@ -674,9 +674,9 @@ public:
* This must be idempotent.
*/
virtual void DestroyImpl();
-StreamTime GetBufferEnd() { return mBuffer.GetEnd(); }
+StreamTime GetTracksEnd() { return mTracks.GetEnd(); }
#ifdef DEBUG
-void DumpTrackInfo() { return mBuffer.DumpTrackInfo(); }
+void DumpTrackInfo() { return mTracks.DumpTrackInfo(); }
#endif
void SetAudioOutputVolumeImpl(void* aKey, float aVolume);
void AddAudioOutputImpl(void* aKey);
@@ -713,36 +713,36 @@ public:
{
return mConsumers.Length();
}
-StreamBuffer& GetStreamBuffer() { return mBuffer; }
-GraphTime GetStreamBufferStartTime() { return mBufferStartTime; }
+StreamTracks& GetStreamTracks() { return mTracks; }
+GraphTime GetStreamTracksStartTime() { return mTracksStartTime; }
double StreamTimeToSeconds(StreamTime aTime)
{
NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
-return static_cast<double>(aTime)/mBuffer.GraphRate();
+return static_cast<double>(aTime)/mTracks.GraphRate();
}
int64_t StreamTimeToMicroseconds(StreamTime aTime)
{
NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
-return (aTime*1000000)/mBuffer.GraphRate();
+return (aTime*1000000)/mTracks.GraphRate();
}
StreamTime SecondsToNearestStreamTime(double aSeconds)
{
NS_ASSERTION(0 <= aSeconds && aSeconds <= TRACK_TICKS_MAX/TRACK_RATE_MAX,
"Bad seconds");
-return mBuffer.GraphRate() * aSeconds + 0.5;
+return mTracks.GraphRate() * aSeconds + 0.5;
}
StreamTime MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds) {
-return (aMicroseconds*mBuffer.GraphRate())/1000000;
+return (aMicroseconds*mTracks.GraphRate())/1000000;
}
TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
{
-return RateConvertTicksRoundUp(aRate, mBuffer.GraphRate(), aTime);
+return RateConvertTicksRoundUp(aRate, mTracks.GraphRate(), aTime);
}
StreamTime TicksToTimeRoundDown(TrackRate aRate, TrackTicks aTicks)
{
-return RateConvertTicksRoundDown(mBuffer.GraphRate(), aRate, aTicks);
+return RateConvertTicksRoundDown(mTracks.GraphRate(), aRate, aTicks);
}
/**
* Convert graph time to stream time. aTime must be <= mStateComputedTime
@@ -773,9 +773,9 @@ public:
/**
* Find track by track id.
*/
-StreamBuffer::Track* FindTrack(TrackID aID);
-StreamBuffer::Track* EnsureTrack(TrackID aTrack);
+StreamTracks::Track* FindTrack(TrackID aID);
+StreamTracks::Track* EnsureTrack(TrackID aTrack);
virtual void ApplyTrackDisabling(TrackID aTrackID, MediaSegment* aSegment, MediaSegment* aRawSegment = nullptr);
@@ -808,8 +808,8 @@ public:
protected:
void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
{
-mBufferStartTime += aBlockedTime;
-mBuffer.ForgetUpTo(aCurrentTime - mBufferStartTime);
+mTracksStartTime += aBlockedTime;
+mTracks.ForgetUpTo(aCurrentTime - mTracksStartTime);
}
void NotifyMainThreadListeners()
@@ -836,14 +836,14 @@ protected:
// This state is all initialized on the main thread but
// otherwise modified only on the media graph thread.
-// Buffered data. The start of the buffer corresponds to mBufferStartTime.
+// Buffered data. The start of the buffer corresponds to mTracksStartTime.
// Conceptually the buffer contains everything this stream has ever played,
// but we forget some prefix of the buffered data to bound the space usage.
-StreamBuffer mBuffer;
+StreamTracks mTracks;
// The time when the buffered data could be considered to have started playing.
// This increases over time to account for time the stream was blocked before
// mCurrentTime.
-GraphTime mBufferStartTime;
+GraphTime mTracksStartTime;
// Client-set volume of this stream
struct AudioOutput {
@@ -863,7 +863,7 @@ protected:
// GraphTime at which this stream starts blocking.
// This is only valid up to mStateComputedTime. The stream is considered to
-// have not been blocked before mCurrentTime (its mBufferStartTime is increased
+// have not been blocked before mCurrentTime (its mTracksStartTime is increased
// as necessary to account for that time instead).
GraphTime mStartBlocking;

View file

@@ -403,7 +403,7 @@ public:
void PlayVideo(MediaStream* aStream);
/**
* No more data will be forthcoming for aStream. The stream will end
-* at the current buffer end point. The StreamBuffer's tracks must be
+* at the current buffer end point. The StreamTracks's tracks must be
* explicitly set to finished by the caller.
*/
void OpenAudioInputImpl(int aID,

View file

@@ -10,7 +10,7 @@
#include "nsError.h"
#include "nsID.h"
#include "nsIPrincipal.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "MediaTrackConstraints.h"
#include "mozilla/CORSMode.h"
#include "PrincipalChangeObserver.h"

dom/media/StreamTracks.cpp (new file)

@@ -0,0 +1,117 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "StreamTracks.h"
#include "mozilla/Logging.h"
#include <algorithm>
namespace mozilla {
extern LazyLogModule gMediaStreamGraphLog;
#define STREAM_LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
#ifdef DEBUG
void
StreamTracks::DumpTrackInfo() const
{
STREAM_LOG(LogLevel::Info, ("DumpTracks: mTracksKnownTime %lld", mTracksKnownTime));
for (uint32_t i = 0; i < mTracks.Length(); ++i) {
Track* track = mTracks[i];
if (track->IsEnded()) {
STREAM_LOG(LogLevel::Info, ("Track[%d] %d: ended", i, track->GetID()));
} else {
STREAM_LOG(LogLevel::Info, ("Track[%d] %d: %lld", i, track->GetID(),
track->GetEnd()));
}
}
}
#endif
StreamTime
StreamTracks::GetEnd() const
{
StreamTime t = mTracksKnownTime;
for (uint32_t i = 0; i < mTracks.Length(); ++i) {
Track* track = mTracks[i];
if (!track->IsEnded()) {
t = std::min(t, track->GetEnd());
}
}
return t;
}
StreamTime
StreamTracks::GetAllTracksEnd() const
{
if (mTracksKnownTime < STREAM_TIME_MAX) {
// A track might be added.
return STREAM_TIME_MAX;
}
StreamTime t = 0;
for (uint32_t i = 0; i < mTracks.Length(); ++i) {
Track* track = mTracks[i];
if (!track->IsEnded()) {
return STREAM_TIME_MAX;
}
t = std::max(t, track->GetEnd());
}
return t;
}
StreamTracks::Track*
StreamTracks::FindTrack(TrackID aID)
{
if (aID == TRACK_NONE || mTracks.IsEmpty()) {
return nullptr;
}
// The tracks are sorted by ID. We can use a binary search.
uint32_t left = 0, right = mTracks.Length() - 1;
while (left <= right) {
uint32_t middle = (left + right) / 2;
if (mTracks[middle]->GetID() == aID) {
return mTracks[middle];
}
if (mTracks[middle]->GetID() > aID) {
if (middle == 0) {
break;
}
right = middle - 1;
} else {
left = middle + 1;
}
}
return nullptr;
}
void
StreamTracks::ForgetUpTo(StreamTime aTime)
{
// Only prune if there is a reasonable chunk (50ms @ 48kHz) to forget, so we
// don't spend too much time pruning segments.
const StreamTime minChunkSize = 2400;
if (aTime < mForgottenTime + minChunkSize) {
return;
}
mForgottenTime = aTime;
for (uint32_t i = 0; i < mTracks.Length(); ++i) {
Track* track = mTracks[i];
if (track->IsEnded() && track->GetEnd() <= aTime) {
mTracks.RemoveElementAt(i);
mTracksDirty = true;
--i;
continue;
}
StreamTime forgetTo = std::min(track->GetEnd() - 1, aTime);
track->ForgetUpTo(forgetTo);
}
}
} // namespace mozilla
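
A note on the pruning threshold in StreamTracks::ForgetUpTo above: the constant 2400 is the "reasonable chunk (50ms @ 48kHz)" from the comment, since 48000 ticks/s * 0.050 s = 2400 ticks; because the threshold is a fixed tick count, it corresponds to a slightly different wall-clock duration at other graph rates.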

dom/media/StreamTracks.h (new file)

@@ -0,0 +1,343 @@
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef MOZILLA_STREAMTRACKS_H_
#define MOZILLA_STREAMTRACKS_H_
#include "MediaSegment.h"
#include "nsAutoPtr.h"
namespace mozilla {
/**
* Unique ID for track within a StreamTracks. Tracks from different
* StreamTrackss may have the same ID; this matters when appending StreamTrackss,
* since tracks with the same ID are matched. Only IDs greater than 0 are allowed.
*/
typedef int32_t TrackID;
const TrackID TRACK_NONE = 0;
const TrackID TRACK_INVALID = -1;
const TrackID TRACK_ANY = -2;
inline bool IsTrackIDExplicit(const TrackID& aId) {
return aId > TRACK_NONE;
}
inline TrackTicks RateConvertTicksRoundDown(TrackRate aOutRate,
TrackRate aInRate,
TrackTicks aTicks)
{
NS_ASSERTION(0 < aOutRate && aOutRate <= TRACK_RATE_MAX, "Bad out rate");
NS_ASSERTION(0 < aInRate && aInRate <= TRACK_RATE_MAX, "Bad in rate");
NS_WARN_IF_FALSE(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad ticks"); // bug 957691
return (aTicks * aOutRate) / aInRate;
}
inline TrackTicks RateConvertTicksRoundUp(TrackRate aOutRate,
TrackRate aInRate, TrackTicks aTicks)
{
NS_ASSERTION(0 < aOutRate && aOutRate <= TRACK_RATE_MAX, "Bad out rate");
NS_ASSERTION(0 < aInRate && aInRate <= TRACK_RATE_MAX, "Bad in rate");
NS_ASSERTION(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad ticks");
return (aTicks * aOutRate + aInRate - 1) / aInRate;
}
/**
* This object contains the decoded data for a stream's tracks.
* A StreamTracks can be appended to. Logically a StreamTracks only gets longer,
* but we also have the ability to "forget" data before a certain time that
* we know won't be used again. (We prune a whole number of seconds internally.)
*
* StreamTrackss should only be used from one thread at a time.
*
* A StreamTracks has a set of tracks that can be of arbitrary types ---
* the data for each track is a MediaSegment. The set of tracks can vary
* over the timeline of the StreamTracks.
*/
class StreamTracks
{
public:
/**
* Every track has a start time --- when it started in the StreamTracks.
* It has an end flag; when false, no end point is known; when true,
* the track ends when the data we have for the track runs out.
* Tracks have a unique ID assigned at creation. This allows us to identify
* the same track across StreamTrackss. A StreamTracks should never have
* two tracks with the same ID (even if they don't overlap in time).
* TODO Tracks can also be enabled and disabled over time.
* Takes ownership of aSegment.
*/
class Track final
{
Track(TrackID aID, StreamTime aStart, MediaSegment* aSegment)
: mStart(aStart),
mSegment(aSegment),
mID(aID),
mEnded(false)
{
MOZ_COUNT_CTOR(Track);
NS_ASSERTION(aID > TRACK_NONE, "Bad track ID");
NS_ASSERTION(0 <= aStart && aStart <= aSegment->GetDuration(), "Bad start position");
}
public:
~Track()
{
MOZ_COUNT_DTOR(Track);
}
template <class T> T* Get() const
{
if (mSegment->GetType() == T::StaticType()) {
return static_cast<T*>(mSegment.get());
}
return nullptr;
}
MediaSegment* GetSegment() const { return mSegment; }
TrackID GetID() const { return mID; }
bool IsEnded() const { return mEnded; }
StreamTime GetStart() const { return mStart; }
StreamTime GetEnd() const { return mSegment->GetDuration(); }
MediaSegment::Type GetType() const { return mSegment->GetType(); }
void SetEnded() { mEnded = true; }
void AppendFrom(Track* aTrack)
{
NS_ASSERTION(!mEnded, "Can't append to ended track");
NS_ASSERTION(aTrack->mID == mID, "IDs must match");
NS_ASSERTION(aTrack->mStart == 0, "Source track must start at zero");
NS_ASSERTION(aTrack->mSegment->GetType() == GetType(), "Track types must match");
mSegment->AppendFrom(aTrack->mSegment);
mEnded = aTrack->mEnded;
}
MediaSegment* RemoveSegment()
{
return mSegment.forget();
}
void ForgetUpTo(StreamTime aTime)
{
mSegment->ForgetUpTo(aTime);
}
void FlushAfter(StreamTime aNewEnd)
{
// Forget everything after a given endpoint
// a specified amount
mSegment->FlushAfter(aNewEnd);
}
size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
{
size_t amount = aMallocSizeOf(this);
if (mSegment) {
amount += mSegment->SizeOfIncludingThis(aMallocSizeOf);
}
return amount;
}
private:
friend class StreamTracks;
// Start offset is in ticks at rate mRate
StreamTime mStart;
// The segment data starts at the start of the owning StreamTracks, i.e.,
// there's mStart silence/no video at the beginning.
nsAutoPtr<MediaSegment> mSegment;
// Unique ID
TrackID mID;
// True when the track ends with the data in mSegment
bool mEnded;
};
class MOZ_STACK_CLASS CompareTracksByID final
{
public:
bool Equals(Track* aA, Track* aB) const {
return aA->GetID() == aB->GetID();
}
bool LessThan(Track* aA, Track* aB) const {
return aA->GetID() < aB->GetID();
}
};
StreamTracks()
: mGraphRate(0)
, mTracksKnownTime(0)
, mForgottenTime(0)
, mTracksDirty(false)
#ifdef DEBUG
, mGraphRateIsSet(false)
#endif
{
MOZ_COUNT_CTOR(StreamTracks);
}
~StreamTracks()
{
MOZ_COUNT_DTOR(StreamTracks);
}
size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
size_t amount = 0;
amount += mTracks.ShallowSizeOfExcludingThis(aMallocSizeOf);
for (size_t i = 0; i < mTracks.Length(); i++) {
amount += mTracks[i]->SizeOfIncludingThis(aMallocSizeOf);
}
return amount;
}
/**
* Initialize the graph rate for use in calculating StreamTimes from track
* ticks. Called when a MediaStream's graph pointer is initialized.
*/
void InitGraphRate(TrackRate aGraphRate)
{
mGraphRate = aGraphRate;
#if DEBUG
MOZ_ASSERT(!mGraphRateIsSet);
mGraphRateIsSet = true;
#endif
}
TrackRate GraphRate() const
{
MOZ_ASSERT(mGraphRateIsSet);
return mGraphRate;
}
/**
* Takes ownership of aSegment. Don't do this while iterating, or while
* holding a Track reference.
* aSegment must have aStart worth of null data.
*/
Track& AddTrack(TrackID aID, StreamTime aStart, MediaSegment* aSegment)
{
NS_ASSERTION(!FindTrack(aID), "Track with this ID already exists");
Track* track = new Track(aID, aStart, aSegment);
mTracks.InsertElementSorted(track, CompareTracksByID());
mTracksDirty = true;
if (mTracksKnownTime == STREAM_TIME_MAX) {
// There exists code like
// http://mxr.mozilla.org/mozilla-central/source/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp?rev=96b197deb91e&mark=1292-1297#1292
NS_WARNING("Adding track to StreamTracks that should have no more tracks");
} else {
NS_ASSERTION(mTracksKnownTime <= aStart, "Start time too early");
}
return *track;
}
void AdvanceKnownTracksTime(StreamTime aKnownTime)
{
NS_ASSERTION(aKnownTime >= mTracksKnownTime, "Can't move tracks-known time earlier");
mTracksKnownTime = aKnownTime;
}
/**
* The end time for the StreamTracks is the latest time for which we have
* data for all tracks that haven't ended by that time.
*/
StreamTime GetEnd() const;
/**
* Returns the earliest time >= 0 at which all tracks have ended
* and all their data has been played out and no new tracks can be added,
* or STREAM_TIME_MAX if there is no such time.
*/
StreamTime GetAllTracksEnd() const;
#ifdef DEBUG
void DumpTrackInfo() const;
#endif
Track* FindTrack(TrackID aID);
class MOZ_STACK_CLASS TrackIter final
{
public:
/**
* Iterate through the tracks of aBuffer in order of ID.
*/
explicit TrackIter(const StreamTracks& aBuffer) :
mBuffer(&aBuffer.mTracks), mIndex(0), mMatchType(false) {}
/**
* Iterate through the tracks of aBuffer with type aType, in order of ID.
*/
TrackIter(const StreamTracks& aBuffer, MediaSegment::Type aType) :
mBuffer(&aBuffer.mTracks), mIndex(0), mType(aType), mMatchType(true) { FindMatch(); }
bool IsEnded() { return mIndex >= mBuffer->Length(); }
void Next()
{
++mIndex;
FindMatch();
}
Track* get() { return mBuffer->ElementAt(mIndex); }
Track& operator*() { return *mBuffer->ElementAt(mIndex); }
Track* operator->() { return mBuffer->ElementAt(mIndex); }
private:
void FindMatch()
{
if (!mMatchType)
return;
while (mIndex < mBuffer->Length() &&
mBuffer->ElementAt(mIndex)->GetType() != mType) {
++mIndex;
}
}
const nsTArray<nsAutoPtr<Track> >* mBuffer;
uint32_t mIndex;
MediaSegment::Type mType;
bool mMatchType;
};
friend class TrackIter;
/**
* Forget stream data before aTime; they will no longer be needed.
* Also can forget entire tracks that have ended at or before aTime.
* Can't be used to forget beyond GetEnd().
*/
void ForgetUpTo(StreamTime aTime);
/**
* Returns the latest time passed to ForgetUpTo.
*/
StreamTime GetForgottenDuration()
{
return mForgottenTime;
}
bool GetAndResetTracksDirty()
{
if (!mTracksDirty) {
return false;
}
mTracksDirty = false;
return true;
}
protected:
TrackRate mGraphRate; // StreamTime per second
// Any new tracks added will start at or after this time. In other words, the track
// list is complete and correct for all times less than this time.
StreamTime mTracksKnownTime;
StreamTime mForgottenTime;
private:
// All known tracks for this StreamTracks
nsTArray<nsAutoPtr<Track>> mTracks;
bool mTracksDirty;
#ifdef DEBUG
bool mGraphRateIsSet;
#endif
};
} // namespace mozilla
#endif /* MOZILLA_STREAMTRACKS_H_ */
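
The header above defines the full StreamTracks API added by this patch. The following is a minimal usage sketch, not part of the commit: it only exercises calls that appear in this diff (InitGraphRate, AddTrack, Get<AudioSegment>, AppendNullData, AdvanceKnownTracksTime, TrackIter, RateConvertTicksRoundUp, ForgetUpTo), while the function name SketchStreamTracksUsage and the literal track ID, rates, and tick counts are illustrative values chosen for the example.

#include "StreamTracks.h"
#include "AudioSegment.h"

using namespace mozilla;

// Sketch: drive a StreamTracks the way the MediaStream code in this patch does.
static void SketchStreamTracksUsage()
{
  StreamTracks tracks;
  tracks.InitGraphRate(48000);  // normally taken from the MediaStreamGraph

  // Add an audio track starting at time 0; AddTrack takes ownership of the segment.
  StreamTracks::Track& track = tracks.AddTrack(1, 0, new AudioSegment());

  // Append 480 ticks (10 ms at 48 kHz) of silence to the track's segment.
  track.Get<AudioSegment>()->AppendNullData(480);

  // The set of tracks is now known and complete up to 480 ticks.
  tracks.AdvanceKnownTracksTime(480);

  // Iterate audio tracks, as AudioCaptureStream::ProcessInput does above.
  for (StreamTracks::TrackIter iter(tracks, MediaSegment::AUDIO);
       !iter.IsEnded(); iter.Next()) {
    iter->SetEnded();
  }

  // Rate conversion helpers from this header: 100 ticks at 48 kHz is
  // ceil(100 * 44100 / 48000) = ceil(91.875) = 92 ticks at 44.1 kHz.
  TrackTicks converted = RateConvertTicksRoundUp(44100, tracks.GraphRate(), 100);
  (void)converted;

  // Drop data that will not be needed again; a no-op here because 480 ticks
  // is below the 50 ms pruning threshold in StreamTracks::ForgetUpTo.
  tracks.ForgetUpTo(tracks.GetEnd());
}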

View file

@@ -87,14 +87,14 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
allHaveCurrentData = false;
}
bool trackAdded = false;
-for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer());
+for (StreamTracks::TrackIter tracks(stream->GetStreamTracks());
!tracks.IsEnded(); tracks.Next()) {
bool found = false;
for (uint32_t j = 0; j < mTrackMap.Length(); ++j) {
TrackMapEntry* map = &mTrackMap[j];
if (map->mInputPort == mInputs[i] && map->mInputTrackID == tracks->GetID()) {
-bool trackFinished;
-StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
+bool trackFinished = false;
+StreamTracks::Track* outputTrack = mTracks.FindTrack(map->mOutputTrackID);
found = true;
if (!outputTrack || outputTrack->IsEnded() ||
!mInputs[i]->PassTrackThrough(tracks->GetID())) {
@@ -138,7 +138,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
// so we're finished now.
FinishOnGraphThread();
} else {
-mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking(aTo));
+mTracks.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking(aTo));
}
if (allHaveCurrentData) {
// We can make progress if we're not blocked
@@ -146,7 +146,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
}
}
-uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
+uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamTracks::Track* aTrack,
GraphTime aFrom)
{
STREAM_LOG(LogLevel::Verbose, ("TrackUnionStream %p adding track %d for "
@@ -205,8 +205,8 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
aPort->GetSource(), aTrack->GetID());
}
segment->AppendNullData(outputStart);
-StreamBuffer::Track* track =
-&mBuffer.AddTrack(id, outputStart, segment.forget());
+StreamTracks::Track* track =
+&mTracks.AddTrack(id, outputStart, segment.forget());
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p added track %d for input stream %p track %d, start ticks %lld",
this, track->GetID(), aPort->GetSource(), aTrack->GetID(),
(long long)outputStart));
@@ -246,7 +246,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
void TrackUnionStream::EndTrack(uint32_t aIndex)
{
-StreamBuffer::Track* outputTrack = mBuffer.FindTrack(mTrackMap[aIndex].mOutputTrackID);
+StreamTracks::Track* outputTrack = mTracks.FindTrack(mTrackMap[aIndex].mOutputTrackID);
if (!outputTrack || outputTrack->IsEnded())
return;
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p ending track %d", this, outputTrack->GetID()));
@@ -269,12 +269,12 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
outputTrack->SetEnded();
}
-void TrackUnionStream::CopyTrackData(StreamBuffer::Track* aInputTrack,
+void TrackUnionStream::CopyTrackData(StreamTracks::Track* aInputTrack,
uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
bool* aOutputTrackFinished)
{
TrackMapEntry* map = &mTrackMap[aMapIndex];
-StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
+StreamTracks::Track* outputTrack = mTracks.FindTrack(map->mOutputTrackID);
MOZ_ASSERT(outputTrack && !outputTrack->IsEnded(), "Can't copy to ended track");
MediaSegment* segment = map->mSegment;

View file

@@ -41,7 +41,7 @@ protected:
// We keep track IDs instead of track pointers because
// tracks can be removed without us being notified (e.g.
// when a finished track is forgotten.) When we need a Track*,
-// we call StreamBuffer::FindTrack, which will return null if
+// we call StreamTracks::FindTrack, which will return null if
// the track has been deleted.
TrackID mInputTrackID;
TrackID mOutputTrackID;
@@ -54,10 +54,10 @@ protected:
// Add the track to this stream, retaining its TrackID if it has never
// been previously used in this stream, allocating a new TrackID otherwise.
-uint32_t AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
+uint32_t AddTrack(MediaInputPort* aPort, StreamTracks::Track* aTrack,
GraphTime aFrom);
void EndTrack(uint32_t aIndex);
-void CopyTrackData(StreamBuffer::Track* aInputTrack,
+void CopyTrackData(StreamTracks::Track* aInputTrack,
uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
bool* aOutputTrackFinished);

View file

@@ -10,7 +10,7 @@
#include "AudioSegment.h"
#include "EncodedFrameContainer.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "TrackMetadataBase.h"
#include "VideoSegment.h"
#include "MediaStreamGraph.h"

View file

@@ -140,7 +140,7 @@ EXPORTS += [
'SeekTask.h',
'SelfRef.h',
'SharedBuffer.h',
-'StreamBuffer.h',
+'StreamTracks.h',
'ThreadPoolCOMListener.h',
'TimeUnits.h',
'TrackUnionStream.h',
@@ -244,7 +244,7 @@ UNIFIED_SOURCES += [
'RtspMediaResource.cpp',
'SeekJob.cpp',
'SeekTask.cpp',
-'StreamBuffer.cpp',
+'StreamTracks.cpp',
'TextTrack.cpp',
'TextTrackCue.cpp',
'TextTrackCueList.cpp',

View file

@@ -152,9 +152,9 @@ AudioNodeExternalInputStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
MediaStream* source = mInputs[0]->GetSource();
AutoTArray<AudioSegment,1> audioSegments;
uint32_t inputChannels = 0;
-for (StreamBuffer::TrackIter tracks(source->mBuffer, MediaSegment::AUDIO);
+for (StreamTracks::TrackIter tracks(source->mTracks, MediaSegment::AUDIO);
!tracks.IsEnded(); tracks.Next()) {
-const StreamBuffer::Track& inputTrack = *tracks;
+const StreamTracks::Track& inputTrack = *tracks;
if (!mInputs[0]->PassTrackThrough(tracks->GetID())) {
continue;
}

View file

@@ -384,9 +384,9 @@ public:
void Run() override
{
auto ns = static_cast<AudioNodeStream*>(mStream);
-ns->mBufferStartTime -= mAdvance;
-StreamBuffer::Track* track = ns->EnsureTrack(AUDIO_TRACK);
+ns->mTracksStartTime -= mAdvance;
+StreamTracks::Track* track = ns->EnsureTrack(AUDIO_TRACK);
track->Get<AudioSegment>()->AppendNullData(mAdvance);
ns->GraphImpl()->DecrementSuspendCount(mStream);
@@ -631,9 +631,9 @@ AudioNodeStream::ProduceOutputBeforeInput(GraphTime aFrom)
void
AudioNodeStream::AdvanceOutputSegment()
{
-StreamBuffer::Track* track = EnsureTrack(AUDIO_TRACK);
+StreamTracks::Track* track = EnsureTrack(AUDIO_TRACK);
// No more tracks will be coming
-mBuffer.AdvanceKnownTracksTime(STREAM_TIME_MAX);
+mTracks.AdvanceKnownTracksTime(STREAM_TIME_MAX);
AudioSegment* segment = track->Get<AudioSegment>();
@@ -656,7 +656,7 @@ AudioNodeStream::AdvanceOutputSegment()
void
AudioNodeStream::FinishOutput()
{
-StreamBuffer::Track* track = EnsureTrack(AUDIO_TRACK);
+StreamTracks::Track* track = EnsureTrack(AUDIO_TRACK);
track->SetEnded();
for (uint32_t j = 0; j < mListeners.Length(); ++j) {

View file

@@ -16,7 +16,7 @@
#include "MediaEngine.h"
#include "VideoSegment.h"
#include "AudioSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "MediaStreamGraph.h"
#include "MediaTrackConstraints.h"
@@ -134,7 +134,7 @@ public:
const PrincipalHandle& aPrincipalHandle) override
{
#ifdef DEBUG
-StreamBuffer::Track* data = aSource->FindTrack(aId);
+StreamTracks::Track* data = aSource->FindTrack(aId);
NS_WARN_IF_FALSE(!data || data->IsEnded() ||
aDesiredTime <= aSource->GetEndOfAppendedData(aId),
"MediaEngineDefaultAudioSource data underrun");

View file

@@ -24,7 +24,7 @@
#include "MediaEngineCameraVideoSource.h"
#include "VideoSegment.h"
#include "AudioSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "MediaStreamGraph.h"
#include "MediaEngineWrapper.h"

View file

@@ -27,7 +27,7 @@
#include "MediaEngineCameraVideoSource.h"
#include "VideoSegment.h"
#include "AudioSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "MediaStreamGraph.h"
#include "cubeb/cubeb.h"
#include "CubebUtils.h"

View file

@@ -1783,7 +1783,7 @@ static void AddTrackAndListener(MediaStream* source,
completed_(completed) {}
virtual void Run() override {
-StreamTime current_end = mStream->GetBufferEnd();
+StreamTime current_end = mStream->GetTracksEnd();
TrackTicks current_ticks =
mStream->TimeToTicksRoundUp(track_rate_, current_end);

View file

@@ -21,7 +21,7 @@
#include "runnable_utils.h"
#include "transportflow.h"
#include "AudioPacketizer.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"

View file

@@ -32,7 +32,7 @@
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/PeerConnectionImplEnumsBinding.h"
#include "PrincipalChangeObserver.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
#include "mozilla/TimeStamp.h"

View file

@@ -22,7 +22,7 @@
#include "mozilla/Mutex.h"
#include "AudioSegment.h"
#include "MediaSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "nsTArray.h"
#include "nsIRunnable.h"
#include "nsISupportsImpl.h"

View file

@@ -14,7 +14,7 @@
#include "mozilla/Mutex.h"
#include "AudioSegment.h"
#include "MediaSegment.h"
-#include "StreamBuffer.h"
+#include "StreamTracks.h"
#include "nsTArray.h"
#include "nsIRunnable.h"
#include "nsISupportsImpl.h"