Bug 1189506. Make AudioContext responsible for tracking all nodes which need to be suspended and resumed. r=padenot

This simplifies MediaStreamGraph by removing the need for it to be aware
of which AudioContext a stream belongs to.

This also makes it easier to reuse stream suspending for purposes other than
AudioContext suspend/resume.

--HG--
extra : commitid : 9EmNxlrjVFO
extra : rebase_source : fee4b35d09c8f5dec76e41840d81423cde619886
This commit is contained in:
Robert O'Callahan 2015-09-16 16:15:21 +12:00
Родитель a320393b12
Коммит 65cc97b0f3
11 изменённых файлов: 152 добавлений и 131 удалений

Просмотреть файл

@ -86,18 +86,7 @@ void
MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
{
aStream->mBufferStartTime = mProcessedTime;
// Check if we're adding a stream to a suspended context, in which case, we
// add it to mSuspendedStreams
bool contextSuspended = false;
if (aStream->AsAudioNodeStream()) {
for (uint32_t i = 0; i < mSuspendedStreams.Length(); i++) {
if (aStream->AudioContextId() == mSuspendedStreams[i]->AudioContextId()) {
contextSuspended = true;
}
}
}
if (contextSuspended) {
if (aStream->IsSuspended()) {
mSuspendedStreams.AppendElement(aStream);
STREAM_LOG(LogLevel::Debug, ("Adding media stream %p to the graph, in the suspended stream array", aStream));
} else {
@ -126,8 +115,11 @@ MediaStreamGraphImpl::RemoveStreamGraphThread(MediaStream* aStream)
// Ensure that mFirstCycleBreaker and mMixer are updated when necessary.
SetStreamOrderDirty();
mStreams.RemoveElement(aStream);
mSuspendedStreams.RemoveElement(aStream);
if (aStream->IsSuspended()) {
mSuspendedStreams.RemoveElement(aStream);
} else {
mStreams.RemoveElement(aStream);
}
NS_RELEASE(aStream); // probably destroying it
@ -470,14 +462,6 @@ MediaStreamGraphImpl::MarkConsumed(MediaStream* aStream)
}
}
// Returns true iff aStream is currently tracked in mSuspendedStreams, i.e.
// it has been suspended by an AudioContext suspend/close operation.
// Graph-thread only (it reads mSuspendedStreams without locking).
bool
MediaStreamGraphImpl::StreamSuspended(MediaStream* aStream)
{
// Only AudioNodeStreams can be suspended, so we can shortcut here.
return aStream->AsAudioNodeStream() &&
mSuspendedStreams.IndexOf(aStream) != mSuspendedStreams.NoIndex;
}
namespace {
// Value of mCycleMarker for unvisited streams in cycle detection.
const uint32_t NOT_VISITED = UINT32_MAX;
@ -608,7 +592,7 @@ MediaStreamGraphImpl::UpdateStreamOrder()
// Not-visited input streams should be processed first.
// SourceMediaStreams have already been ordered.
for (uint32_t i = inputs.Length(); i--; ) {
if (StreamSuspended(inputs[i]->mSource)) {
if (inputs[i]->mSource->IsSuspended()) {
continue;
}
auto input = inputs[i]->mSource->AsProcessedStream();
@ -634,7 +618,7 @@ MediaStreamGraphImpl::UpdateStreamOrder()
// unless it is part of the cycle.
uint32_t cycleStackMarker = 0;
for (uint32_t i = inputs.Length(); i--; ) {
if (StreamSuspended(inputs[i]->mSource)) {
if (inputs[i]->mSource->IsSuspended()) {
continue;
}
auto input = inputs[i]->mSource->AsProcessedStream();
@ -846,7 +830,7 @@ MediaStreamGraphImpl::RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams
continue;
}
if (StreamSuspended(stream)) {
if (stream->IsSuspended()) {
STREAM_LOG(LogLevel::Verbose, ("MediaStream %p is blocked due to being suspended", stream));
MarkStreamBlocking(stream);
continue;
@ -1880,6 +1864,7 @@ MediaStream::MediaStream(DOMMediaStream* aWrapper)
: mBufferStartTime(0)
, mExplicitBlockerCount(0)
, mBlocked(false)
, mSuspendedCount(0)
, mFinished(false)
, mNotifiedFinished(false)
, mNotifiedBlocked(false)
@ -3108,11 +3093,14 @@ MediaStreamGraph::CreateAudioCaptureStream(DOMMediaStream* aWrapper)
}
void
MediaStreamGraph::AddStream(MediaStream* aStream)
MediaStreamGraph::AddStream(MediaStream* aStream, uint32_t aFlags)
{
NS_ADDREF(aStream);
MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
aStream->SetGraphImpl(graph);
if (aFlags & ADD_STREAM_SUSPENDED) {
aStream->IncrementSuspendCount();
}
graph->AppendMessage(new CreateMessage(aStream));
}
@ -3189,26 +3177,8 @@ MediaStreamGraphImpl::ResetVisitedStreamState()
}
void
MediaStreamGraphImpl::StreamSetForAudioContext(dom::AudioContext::AudioContextId aAudioContextId,
mozilla::LinkedList<MediaStream>& aStreamSet)
{
nsTArray<MediaStream*>* runningAndSuspendedPair[2];
runningAndSuspendedPair[0] = &mStreams;
runningAndSuspendedPair[1] = &mSuspendedStreams;
for (uint32_t array = 0; array < 2; array++) {
for (uint32_t i = 0; i < runningAndSuspendedPair[array]->Length(); ++i) {
MediaStream* stream = (*runningAndSuspendedPair[array])[i];
if (aAudioContextId == stream->AudioContextId()) {
aStreamSet.insertFront(stream);
}
}
}
}
void
MediaStreamGraphImpl::MoveStreams(AudioContextOperation aAudioContextOperation,
mozilla::LinkedList<MediaStream>& aStreamSet)
MediaStreamGraphImpl::SuspendOrResumeStreams(AudioContextOperation aAudioContextOperation,
const nsTArray<MediaStream*>& aStreamSet)
{
// For our purpose, Suspend and Close are equivalent: we want to remove the
// streams from the set of streams that are going to be processed.
@ -3219,17 +3189,16 @@ MediaStreamGraphImpl::MoveStreams(AudioContextOperation aAudioContextOperation,
aAudioContextOperation == AudioContextOperation::Resume ? mStreams
: mSuspendedStreams;
MediaStream* stream;
while ((stream = aStreamSet.getFirst())) {
// It is possible to not find the stream here, if there have been two
// suspend/resume/close calls in a row.
for (MediaStream* stream : aStreamSet) {
auto i = from.IndexOf(stream);
if (i != from.NoIndex) {
from.RemoveElementAt(i);
to.AppendElement(stream);
MOZ_ASSERT(i != from.NoIndex);
from.RemoveElementAt(i);
to.AppendElement(stream);
if (aAudioContextOperation == AudioContextOperation::Resume) {
stream->DecrementSuspendCount();
} else {
stream->IncrementSuspendCount();
}
stream->remove();
}
STREAM_LOG(LogLevel::Debug, ("Moving streams between suspended and running"
"state: mStreams: %d, mSuspendedStreams: %d\n", mStreams.Length(),
@ -3270,22 +3239,17 @@ MediaStreamGraphImpl::AudioContextOperationCompleted(MediaStream* aStream,
}
void
MediaStreamGraphImpl::ApplyAudioContextOperationImpl(AudioNodeStream* aStream,
AudioContextOperation aOperation,
void* aPromise)
MediaStreamGraphImpl::ApplyAudioContextOperationImpl(
MediaStream* aDestinationStream, const nsTArray<MediaStream*>& aStreams,
AudioContextOperation aOperation, void* aPromise)
{
MOZ_ASSERT(CurrentDriver()->OnThread());
mozilla::LinkedList<MediaStream> streamSet;
SetStreamOrderDirty();
ResetVisitedStreamState();
StreamSetForAudioContext(aStream->AudioContextId(), streamSet);
MoveStreams(aOperation, streamSet);
MOZ_ASSERT(!streamSet.getFirst(),
"Streams should be removed from the list after having been moved.");
SuspendOrResumeStreams(aOperation, aStreams);
// If we have suspended the last AudioContext, and we don't have other
// streams that have audio, this graph will automatically switch to a
@ -3304,11 +3268,12 @@ MediaStreamGraphImpl::ApplyAudioContextOperationImpl(AudioNodeStream* aStream,
mMixer.AddCallback(driver);
CurrentDriver()->SwitchAtNextIteration(driver);
}
driver->EnqueueStreamAndPromiseForOperation(aStream, aPromise, aOperation);
driver->EnqueueStreamAndPromiseForOperation(aDestinationStream,
aPromise, aOperation);
} else {
// We are resuming a context, but we are already using an
// AudioCallbackDriver, we can resolve the promise now.
AudioContextOperationCompleted(aStream, aPromise, aOperation);
AudioContextOperationCompleted(aDestinationStream, aPromise, aOperation);
}
}
// Close, suspend: check if we are going to switch to a
@ -3330,7 +3295,8 @@ MediaStreamGraphImpl::ApplyAudioContextOperationImpl(AudioNodeStream* aStream,
}
if (!audioTrackPresent && CurrentDriver()->AsAudioCallbackDriver()) {
CurrentDriver()->AsAudioCallbackDriver()->
EnqueueStreamAndPromiseForOperation(aStream, aPromise, aOperation);
EnqueueStreamAndPromiseForOperation(aDestinationStream, aPromise,
aOperation);
SystemClockDriver* driver;
if (CurrentDriver()->NextDriver()) {
@ -3346,35 +3312,39 @@ MediaStreamGraphImpl::ApplyAudioContextOperationImpl(AudioNodeStream* aStream,
} else if (!audioTrackPresent && CurrentDriver()->Switching()) {
MOZ_ASSERT(CurrentDriver()->NextDriver()->AsAudioCallbackDriver());
CurrentDriver()->NextDriver()->AsAudioCallbackDriver()->
EnqueueStreamAndPromiseForOperation(aStream, aPromise, aOperation);
EnqueueStreamAndPromiseForOperation(aDestinationStream, aPromise,
aOperation);
} else {
// We are closing or suspending an AudioContext, but something else is
// using the audio stream, we can resolve the promise now.
AudioContextOperationCompleted(aStream, aPromise, aOperation);
AudioContextOperationCompleted(aDestinationStream, aPromise, aOperation);
}
}
}
void
MediaStreamGraph::ApplyAudioContextOperation(AudioNodeStream* aNodeStream,
MediaStreamGraph::ApplyAudioContextOperation(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
AudioContextOperation aOperation,
void* aPromise)
{
class AudioContextOperationControlMessage : public ControlMessage
{
public:
AudioContextOperationControlMessage(AudioNodeStream* aStream,
AudioContextOperationControlMessage(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
AudioContextOperation aOperation,
void* aPromise)
: ControlMessage(aStream)
: ControlMessage(aDestinationStream)
, mStreams(aStreams)
, mAudioContextOperation(aOperation)
, mPromise(aPromise)
{
}
virtual void Run()
{
mStream->GraphImpl()->ApplyAudioContextOperationImpl(
mStream->AsAudioNodeStream(), mAudioContextOperation, mPromise);
mStream->GraphImpl()->ApplyAudioContextOperationImpl(mStream,
mStreams, mAudioContextOperation, mPromise);
}
virtual void RunDuringShutdown()
{
@ -3382,13 +3352,17 @@ MediaStreamGraph::ApplyAudioContextOperation(AudioNodeStream* aNodeStream,
}
private:
// We don't need strong references here for the same reason ControlMessage
// doesn't.
nsTArray<MediaStream*> mStreams;
AudioContextOperation mAudioContextOperation;
void* mPromise;
};
MediaStreamGraphImpl* graphImpl = static_cast<MediaStreamGraphImpl*>(this);
graphImpl->AppendMessage(
new AudioContextOperationControlMessage(aNodeStream, aOperation, aPromise));
new AudioContextOperationControlMessage(aDestinationStream, aStreams,
aOperation, aPromise));
}
bool

Просмотреть файл

@ -319,7 +319,6 @@ public:
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaStream)
explicit MediaStream(DOMMediaStream* aWrapper);
virtual dom::AudioContext::AudioContextId AudioContextId() const { return 0; }
protected:
// Protected destructor, to discourage deletion outside of Release():
@ -583,6 +582,14 @@ public:
void SetAudioChannelType(dom::AudioChannel aType) { mAudioChannelType = aType; }
dom::AudioChannel AudioChannelType() const { return mAudioChannelType; }
// True while this stream has at least one outstanding suspend operation.
bool IsSuspended() { return mSuspendedCount > 0; }
// Record one more suspend operation; the stream remains suspended until a
// matching DecrementSuspendCount() returns the count to zero.
void IncrementSuspendCount() { ++mSuspendedCount; }
// Undo one suspend operation. Asserts (debug only) that the count does not
// go negative, i.e. that increments and decrements are balanced.
void DecrementSuspendCount()
{
NS_ASSERTION(mSuspendedCount > 0, "Suspend count underrun");
--mSuspendedCount;
}
protected:
void AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime, GraphTime aBlockedTime)
{
@ -670,6 +677,12 @@ protected:
};
nsTArray<AudioOutputStream> mAudioOutputStreams;
/**
* Number of outstanding suspend operations on this stream. Stream is
* suspended when this is > 0.
*/
int32_t mSuspendedCount;
/**
* When true, this means the stream will be finished once all
* buffered data has been consumed.
@ -1253,25 +1266,31 @@ public:
*/
ProcessedMediaStream* CreateAudioCaptureStream(DOMMediaStream* aWrapper);
enum {
ADD_STREAM_SUSPENDED = 0x01
};
/**
* Add a new stream to the graph. Main thread.
*/
void AddStream(MediaStream* aStream);
void AddStream(MediaStream* aStream, uint32_t aFlags = 0);
/* From the main thread, ask the MSG to send back an event when the graph
* thread is running, and audio is being processed. */
void NotifyWhenGraphStarted(AudioNodeStream* aNodeStream);
/* From the main thread, suspend, resume or close an AudioContext.
* aNodeStream is the stream of the DestinationNode of the AudioContext.
* aStreams are the streams of all the AudioNodes of the AudioContext that
* need to be suspended or resumed. This can be empty if this is a second
* consecutive suspend call and all the nodes are already suspended.
*
* This can possibly pause the graph thread, releasing system resources, if
* all streams have been suspended/closed.
*
* When the operation is complete, aPromise is resolved.
*/
void ApplyAudioContextOperation(AudioNodeStream* aNodeStream,
void ApplyAudioContextOperation(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
dom::AudioContextOperation aState,
void * aPromise);
void* aPromise);
bool IsNonRealtime() const;
/**

Просмотреть файл

@ -255,13 +255,6 @@ public:
*/
static void MarkConsumed(MediaStream* aStream);
/**
* Given the Id of an AudioContext, return the set of all MediaStreams that
* are part of this context.
*/
void StreamSetForAudioContext(dom::AudioContext::AudioContextId aAudioContextId,
mozilla::LinkedList<MediaStream>& aStreamSet);
/**
* Called when a suspend/resume/close operation has been completed, on the
* graph thread.
@ -274,7 +267,8 @@ public:
* Apply an AudioContext operation (suspend/resume/close), on the graph
* thread.
*/
void ApplyAudioContextOperationImpl(AudioNodeStream* aStream,
void ApplyAudioContextOperationImpl(MediaStream* aDestinationStream,
const nsTArray<MediaStream*>& aStreams,
dom::AudioContextOperation aOperation,
void* aPromise);
@ -282,8 +276,8 @@ public:
* Move streams from mStreams to mSuspendedStreams if suspending/closing an
* AudioContext, or the inverse when resuming an AudioContext.
*/
void MoveStreams(dom::AudioContextOperation aAudioContextOperation,
mozilla::LinkedList<MediaStream>& aStreamSet);
void SuspendOrResumeStreams(dom::AudioContextOperation aAudioContextOperation,
const nsTArray<MediaStream*>& aStreamSet);
/*
* Reset some state about the streams before suspending them, or resuming
@ -291,12 +285,6 @@ public:
*/
void ResetVisitedStreamState();
/*
* True if a stream is suspended, that is, it is not in mStreams but in
* mSuspendedStreams.
*/
bool StreamSuspended(MediaStream* aStream);
/**
* Sort mStreams so that every stream not in a cycle is after any streams
* it depends on, and every stream in a cycle is marked as being in a cycle.
@ -605,6 +593,10 @@ public:
* and are therefore not doing any processing.
*/
nsTArray<MediaStream*> mSuspendedStreams;
/**
* Suspended AudioContext IDs
*/
nsTHashtable<nsUint64HashKey> mSuspendedContexts;
/**
* Streams from mFirstCycleBreaker to the end of mStreams produce output
* before they receive input. They correspond to DelayNodes that are in

Просмотреть файл

@ -269,7 +269,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
} else if (InMutedCycle()) {
segment->AppendNullData(ticks);
} else {
if (GraphImpl()->StreamSuspended(source)) {
if (source->IsSuspended()) {
segment->AppendNullData(aTo - aFrom);
} else {
MOZ_ASSERT(outputTrack->GetEnd() == GraphTimeToStreamTime(interval.mStart),

Просмотреть файл

@ -102,6 +102,7 @@ AudioContext::AudioContext(nsPIDOMWindow* aWindow,
, mIsStarted(!aIsOffline)
, mIsShutDown(false)
, mCloseCalled(false)
, mSuspendCalled(false)
{
bool mute = aWindow->AddAudioContext(this);
@ -824,6 +825,19 @@ AudioContext::OnStateChanged(void* aPromise, AudioContextState aNewState)
mAudioContextState = aNewState;
}
// Collect the MediaStream backing each entry of mAllNodes (presumably the set
// of all AudioNodes belonging to this AudioContext — confirm against the
// mAllNodes declaration, which is not visible here). Entries whose stream is
// null are skipped, so the result may be shorter than mAllNodes.
nsTArray<MediaStream*>
AudioContext::GetAllStreams() const
{
nsTArray<MediaStream*> streams;
for (auto iter = mAllNodes.ConstIter(); !iter.Done(); iter.Next()) {
// GetStream() may return null, e.g. for a node whose stream is gone.
MediaStream* s = iter.Get()->GetKey()->GetStream();
if (s) {
streams.AppendElement(s);
}
}
return streams;
}
already_AddRefed<Promise>
AudioContext::Suspend(ErrorResult& aRv)
{
@ -852,9 +866,21 @@ AudioContext::Suspend(ErrorResult& aRv)
Destination()->Suspend();
mPromiseGripArray.AppendElement(promise);
nsTArray<MediaStream*> streams;
// If mSuspendCalled is true then we already suspended all our streams,
// so don't suspend them again (since suspend(); suspend(); resume(); should
// cancel both suspends). But we still need to do ApplyAudioContextOperation
// to ensure our new promise is resolved.
if (!mSuspendCalled) {
streams = GetAllStreams();
}
Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
streams,
AudioContextOperation::Suspend, promise);
mSuspendCalled = true;
return promise.forget();
}
@ -886,10 +912,21 @@ AudioContext::Resume(ErrorResult& aRv)
Destination()->Resume();
nsTArray<MediaStream*> streams;
// If mSuspendCalled is false then we already resumed all our streams,
// so don't resume them again (since suspend(); resume(); resume(); should
// be OK). But we still need to do ApplyAudioContextOperation
// to ensure our new promise is resolved.
if (mSuspendCalled) {
streams = GetAllStreams();
}
mPromiseGripArray.AppendElement(promise);
Graph()->ApplyAudioContextOperation(DestinationStream()->AsAudioNodeStream(),
streams,
AudioContextOperation::Resume, promise);
mSuspendCalled = false;
return promise.forget();
}
@ -913,8 +950,6 @@ AudioContext::Close(ErrorResult& aRv)
return promise.forget();
}
mCloseCalled = true;
if (Destination()) {
Destination()->DestroyAudioChannelAgent();
}
@ -925,9 +960,18 @@ AudioContext::Close(ErrorResult& aRv)
// this point, so we need extra null-checks.
MediaStream* ds = DestinationStream();
if (ds) {
Graph()->ApplyAudioContextOperation(ds->AsAudioNodeStream(),
nsTArray<MediaStream*> streams;
// If mSuspendCalled or mCloseCalled are true then we already suspended
// all our streams, so don't suspend them again. But we still need to do
// ApplyAudioContextOperation to ensure our new promise is resolved.
if (!mSuspendCalled && !mCloseCalled) {
streams = GetAllStreams();
}
Graph()->ApplyAudioContextOperation(ds->AsAudioNodeStream(), streams,
AudioContextOperation::Close, promise);
}
mCloseCalled = true;
return promise.forget();
}

Просмотреть файл

@ -175,10 +175,7 @@ public:
return mSampleRate;
}
AudioContextId Id() const
{
return mId;
}
bool ShouldSuspendNewStream() const { return mSuspendCalled; }
double CurrentTime() const;
@ -345,6 +342,8 @@ private:
bool CheckClosed(ErrorResult& aRv);
nsTArray<MediaStream*> GetAllStreams() const;
private:
// Each AudioContext has an id, that is passed down the MediaStreams that
// back the AudioNodes, so we can easily compute the set of all the
@ -377,6 +376,8 @@ private:
bool mIsShutDown;
// Close has been called, reject suspend and resume call.
bool mCloseCalled;
// Suspend has been called with no following resume.
bool mSuspendCalled;
};
static const dom::AudioContext::AudioContextId NO_AUDIO_CONTEXT = 0;

Просмотреть файл

@ -177,7 +177,7 @@ public:
};
// Returns the stream, if any.
AudioNodeStream* GetStream() { return mStream; }
AudioNodeStream* GetStream() const { return mStream; }
const nsTArray<InputNode>& InputNodes() const
{

Просмотреть файл

@ -12,8 +12,8 @@ using namespace mozilla::dom;
namespace mozilla {
AudioNodeExternalInputStream::AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate, uint32_t aContextId)
: AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate, aContextId)
AudioNodeExternalInputStream::AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate)
: AudioNodeStream(aEngine, NO_STREAM_FLAGS, aSampleRate)
{
MOZ_COUNT_CTOR(AudioNodeExternalInputStream);
}
@ -27,13 +27,14 @@ AudioNodeExternalInputStream::~AudioNodeExternalInputStream()
AudioNodeExternalInputStream::Create(MediaStreamGraph* aGraph,
AudioNodeEngine* aEngine)
{
AudioContext* ctx = aEngine->NodeMainThread()->Context();
MOZ_ASSERT(NS_IsMainThread());
MOZ_ASSERT(aGraph->GraphRate() == aEngine->NodeMainThread()->Context()->SampleRate());
MOZ_ASSERT(aGraph->GraphRate() == ctx->SampleRate());
nsRefPtr<AudioNodeExternalInputStream> stream =
new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate(),
aEngine->NodeMainThread()->Context()->Id());
aGraph->AddStream(stream);
new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate());
aGraph->AddStream(stream,
ctx->ShouldSuspendNewStream() ? MediaStreamGraph::ADD_STREAM_SUSPENDED : 0);
return stream.forget();
}

Просмотреть файл

@ -25,8 +25,7 @@ public:
Create(MediaStreamGraph* aGraph, AudioNodeEngine* aEngine);
protected:
AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate,
uint32_t aContextId);
AudioNodeExternalInputStream(AudioNodeEngine* aEngine, TrackRate aSampleRate);
~AudioNodeExternalInputStream();
public:

Просмотреть файл

@ -27,12 +27,10 @@ namespace mozilla {
AudioNodeStream::AudioNodeStream(AudioNodeEngine* aEngine,
Flags aFlags,
TrackRate aSampleRate,
AudioContext::AudioContextId aContextId)
TrackRate aSampleRate)
: ProcessedMediaStream(nullptr),
mEngine(aEngine),
mSampleRate(aSampleRate),
mAudioContextId(aContextId),
mFlags(aFlags),
mNumberOfInputChannels(2),
mMarkAsFinishedAfterThisBlock(false),
@ -74,17 +72,15 @@ AudioNodeStream::Create(AudioContext* aCtx, AudioNodeEngine* aEngine,
MediaStreamGraph* graph = aGraph ? aGraph : aCtx->Graph();
MOZ_ASSERT(graph->GraphRate() == aCtx->SampleRate());
dom::AudioContext::AudioContextId contextIdForStream = node ? node->Context()->Id() :
NO_AUDIO_CONTEXT;
nsRefPtr<AudioNodeStream> stream =
new AudioNodeStream(aEngine, aFlags, graph->GraphRate(),
contextIdForStream);
if (aEngine->HasNode()) {
stream->SetChannelMixingParametersImpl(aEngine->NodeMainThread()->ChannelCount(),
aEngine->NodeMainThread()->ChannelCountModeValue(),
aEngine->NodeMainThread()->ChannelInterpretationValue());
new AudioNodeStream(aEngine, aFlags, graph->GraphRate());
if (node) {
stream->SetChannelMixingParametersImpl(node->ChannelCount(),
node->ChannelCountModeValue(),
node->ChannelInterpretationValue());
}
graph->AddStream(stream);
graph->AddStream(stream,
aCtx->ShouldSuspendNewStream() ? MediaStreamGraph::ADD_STREAM_SUSPENDED : 0);
return stream.forget();
}

Просмотреть файл

@ -71,8 +71,7 @@ protected:
*/
AudioNodeStream(AudioNodeEngine* aEngine,
Flags aFlags,
TrackRate aSampleRate,
AudioContext::AudioContextId aContextId);
TrackRate aSampleRate);
~AudioNodeStream();
@ -144,7 +143,6 @@ public:
// Any thread
AudioNodeEngine* Engine() { return mEngine; }
TrackRate SampleRate() const { return mSampleRate; }
AudioContext::AudioContextId AudioContextId() const override { return mAudioContextId; }
/**
* Convert a time in seconds on the destination stream to ticks
@ -196,9 +194,6 @@ protected:
OutputChunks mLastChunks;
// The stream's sampling rate
const TrackRate mSampleRate;
// This is necessary to be able to find all the nodes for a given
// AudioContext. It is set on the main thread, in the constructor.
const AudioContext::AudioContextId mAudioContextId;
// Whether this is an internal or external stream
const Flags mFlags;
// The number of input channels that this stream requires. 0 means don't care.