diff --git a/content/html/content/src/nsHTMLMediaElement.cpp b/content/html/content/src/nsHTMLMediaElement.cpp
index e16922331fcb..65c054d648e7 100644
--- a/content/html/content/src/nsHTMLMediaElement.cpp
+++ b/content/html/content/src/nsHTMLMediaElement.cpp
@@ -2450,13 +2450,13 @@ public:
} else {
event = NS_NewRunnableMethod(this, &StreamListener::DoNotifyUnblocked);
}
- aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
+ aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
}
virtual void NotifyFinished(MediaStreamGraph* aGraph)
{
nsCOMPtr<nsIRunnable> event =
NS_NewRunnableMethod(this, &StreamListener::DoNotifyFinished);
- aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
+ aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
}
virtual void NotifyHasCurrentData(MediaStreamGraph* aGraph,
bool aHasCurrentData)
@@ -2471,7 +2471,7 @@ public:
if (aHasCurrentData) {
nsCOMPtr<nsIRunnable> event =
NS_NewRunnableMethod(this, &StreamListener::DoNotifyHaveCurrentData);
- aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
+ aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
}
}
virtual void NotifyOutput(MediaStreamGraph* aGraph)
@@ -2482,7 +2482,7 @@ public:
mPendingNotifyOutput = true;
nsCOMPtr<nsIRunnable> event =
NS_NewRunnableMethod(this, &StreamListener::DoNotifyOutput);
- aGraph->DispatchToMainThreadAfterStreamStateUpdate(event.forget());
+ aGraph->DispatchToMainThreadAfterStreamStateUpdate(event);
}
private:
diff --git a/content/media/AudioNodeEngine.cpp b/content/media/AudioNodeEngine.cpp
deleted file mode 100644
index 1fd4f804cb89..000000000000
--- a/content/media/AudioNodeEngine.cpp
+++ /dev/null
@@ -1,71 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "AudioNodeEngine.h"
-
-namespace mozilla {
-
-void
-AllocateAudioBlock(uint32_t aChannelCount, AudioChunk* aChunk)
-{
- // XXX for SIMD purposes we should do something here to make sure the
- // channel buffers are 16-byte aligned.
- nsRefPtr<SharedBuffer> buffer =
- SharedBuffer::Create(WEBAUDIO_BLOCK_SIZE*aChannelCount*sizeof(float));
- aChunk->mDuration = WEBAUDIO_BLOCK_SIZE;
- aChunk->mChannelData.SetLength(aChannelCount);
- float* data = static_cast<float*>(buffer->Data());
- for (uint32_t i = 0; i < aChannelCount; ++i) {
- aChunk->mChannelData[i] = data + i*WEBAUDIO_BLOCK_SIZE;
- }
- aChunk->mBuffer = buffer.forget();
- aChunk->mVolume = 1.0f;
- aChunk->mBufferFormat = AUDIO_FORMAT_FLOAT32;
-}
-
-void
-WriteZeroesToAudioBlock(AudioChunk* aChunk, uint32_t aStart, uint32_t aLength)
-{
- MOZ_ASSERT(aStart + aLength <= WEBAUDIO_BLOCK_SIZE);
- if (aLength == 0)
- return;
- for (uint32_t i = 0; i < aChunk->mChannelData.Length(); ++i) {
- memset(static_cast<float*>(const_cast<void*>(aChunk->mChannelData[i])) + aStart,
- 0, aLength*sizeof(float));
- }
-}
-
-void
-AudioBlockAddChannelWithScale(const float aInput[WEBAUDIO_BLOCK_SIZE],
- float aScale,
- float aOutput[WEBAUDIO_BLOCK_SIZE])
-{
- if (aScale == 1.0f) {
- for (uint32_t i = 0; i < WEBAUDIO_BLOCK_SIZE; ++i) {
- aOutput[i] += aInput[i];
- }
- } else {
- for (uint32_t i = 0; i < WEBAUDIO_BLOCK_SIZE; ++i) {
- aOutput[i] += aInput[i]*aScale;
- }
- }
-}
-
-void
-AudioBlockCopyChannelWithScale(const float aInput[WEBAUDIO_BLOCK_SIZE],
- float aScale,
- float aOutput[WEBAUDIO_BLOCK_SIZE])
-{
- if (aScale == 1.0f) {
- memcpy(aOutput, aInput, WEBAUDIO_BLOCK_SIZE*sizeof(float));
- } else {
- for (uint32_t i = 0; i < WEBAUDIO_BLOCK_SIZE; ++i) {
- aOutput[i] = aInput[i]*aScale;
- }
- }
-}
-
-}
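The deleted helpers above are plain per-sample scale/copy and scale/accumulate loops over fixed 128-sample blocks; ObtainInputBlock (removed from AudioNodeStream.cpp below) uses the copy variant for the first input and the add variant for the rest. A minimal standalone sketch of that mixing pattern, using stand-in names (kBlockSize, CopyWithScale, AddWithScale) rather than the real Mozilla symbols:

#include <cstdint>
#include <cstdio>
#include <cstring>

const uint32_t kBlockSize = 128; // stand-in for WEBAUDIO_BLOCK_SIZE (1 << 7)

// First input: copy into the output block, applying the chunk's volume.
void CopyWithScale(const float* aIn, float aScale, float* aOut)
{
  if (aScale == 1.0f) {
    memcpy(aOut, aIn, kBlockSize*sizeof(float));
  } else {
    for (uint32_t i = 0; i < kBlockSize; ++i) {
      aOut[i] = aIn[i]*aScale;
    }
  }
}

// Subsequent inputs: accumulate into the existing mix.
void AddWithScale(const float* aIn, float aScale, float* aOut)
{
  for (uint32_t i = 0; i < kBlockSize; ++i) {
    aOut[i] += aIn[i]*aScale;
  }
}

int main()
{
  float a[kBlockSize], b[kBlockSize], mix[kBlockSize];
  for (uint32_t i = 0; i < kBlockSize; ++i) {
    a[i] = 0.5f;
    b[i] = 0.25f;
  }
  CopyWithScale(a, 1.0f, mix);     // the i == 0 case in ObtainInputBlock
  AddWithScale(b, 0.5f, mix);      // the i > 0 case
  printf("mix[0] = %f\n", mix[0]); // 0.5 + 0.25*0.5 = 0.625
  return 0;
}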
diff --git a/content/media/AudioNodeEngine.h b/content/media/AudioNodeEngine.h
deleted file mode 100644
index 83c1486cd50b..000000000000
--- a/content/media/AudioNodeEngine.h
+++ /dev/null
@@ -1,149 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
-/* vim:set ts=2 sw=2 sts=2 et cindent: */
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this
- * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
-#ifndef MOZILLA_AUDIONODEENGINE_H_
-#define MOZILLA_AUDIONODEENGINE_H_
-
-#include "AudioSegment.h"
-
-namespace mozilla {
-
-class AudioNodeStream;
-
-// We ensure that the graph advances in steps that are multiples of the Web
-// Audio block size
-const uint32_t WEBAUDIO_BLOCK_SIZE_BITS = 7;
-const uint32_t WEBAUDIO_BLOCK_SIZE = 1 << WEBAUDIO_BLOCK_SIZE_BITS;
-
-/**
- * This class holds onto a set of immutable channel buffers. The storage
- * for the buffers must be malloced, but the buffer pointers and the malloc
- * pointers can be different (e.g. if the buffers are contained inside
- * some malloced object).
- */
-class ThreadSharedFloatArrayBufferList : public ThreadSharedObject {
-public:
- /**
- * Construct with null data.
- */
- ThreadSharedFloatArrayBufferList(uint32_t aCount)
- {
- mContents.SetLength(aCount);
- }
-
- struct Storage {
- Storage()
- {
- mDataToFree = nullptr;
- mSampleData = nullptr;
- }
- ~Storage() { free(mDataToFree); }
- void* mDataToFree;
- const float* mSampleData;
- };
-
- /**
- * This can be called on any thread.
- */
- uint32_t GetChannels() const { return mContents.Length(); }
- /**
- * This can be called on any thread.
- */
- const float* GetData(uint32_t aIndex) const { return mContents[aIndex].mSampleData; }
-
- /**
- * Call this only during initialization, before the object is handed to
- * any other thread.
- */
- void SetData(uint32_t aIndex, void* aDataToFree, const float* aData)
- {
- Storage* s = &mContents[aIndex];
- free(s->mDataToFree);
- s->mDataToFree = aDataToFree;
- s->mSampleData = aData;
- }
-
- /**
- * Put this object into an error state where there are no channels.
- */
- void Clear() { mContents.Clear(); }
-
-private:
- AutoFallibleTArray<Storage,GUESS_AUDIO_CHANNELS> mContents;
-};
-
-/**
- * Allocates an AudioChunk with fresh buffers of WEBAUDIO_BLOCK_SIZE float samples.
- * AudioChunk::mChannelData's entries can be cast to float* for writing.
- */
-void AllocateAudioBlock(uint32_t aChannelCount, AudioChunk* aChunk);
-
-/**
- * aChunk must have been allocated by AllocateAudioBlock.
- */
-void WriteZeroesToAudioBlock(AudioChunk* aChunk, uint32_t aStart, uint32_t aLength);
-
-/**
- * Pointwise multiply-add operation. aScale == 1.0f should be optimized.
- */
-void AudioBlockAddChannelWithScale(const float aInput[WEBAUDIO_BLOCK_SIZE],
- float aScale,
- float aOutput[WEBAUDIO_BLOCK_SIZE]);
-
-/**
- * Pointwise copy-scaled operation. aScale == 1.0f should be optimized.
- */
-void AudioBlockCopyChannelWithScale(const float aInput[WEBAUDIO_BLOCK_SIZE],
- float aScale,
- float aOutput[WEBAUDIO_BLOCK_SIZE]);
-
-/**
- * All methods of this class and its subclasses are called on the
- * MediaStreamGraph thread.
- */
-class AudioNodeEngine {
-public:
- AudioNodeEngine() {}
- virtual ~AudioNodeEngine() {}
-
- virtual void SetStreamTimeParameter(uint32_t aIndex, TrackTicks aParam)
- {
- NS_ERROR("Invalid SetStreamTimeParameter index");
- }
- virtual void SetDoubleParameter(uint32_t aIndex, double aParam)
- {
- NS_ERROR("Invalid SetDoubleParameter index");
- }
- virtual void SetInt32Parameter(uint32_t aIndex, int32_t aParam)
- {
- NS_ERROR("Invalid SetInt32Parameter index");
- }
- virtual void SetBuffer(already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer)
- {
- NS_ERROR("SetBuffer called on engine that doesn't support it");
- }
-
- /**
- * Produce the next block of audio samples, given input samples aInput
- * (the mixed data for input 0).
- * By default, simply returns the mixed input.
- * aInput is guaranteed to have float sample format (if it has samples at all)
- * and to have been resampled to IdealAudioRate(), and to have exactly
- * WEBAUDIO_BLOCK_SIZE samples.
- * *aFinished is set to false by the caller. If the callee sets it to true,
- * we'll finish the stream and not call this again.
- */
- virtual void ProduceAudioBlock(AudioNodeStream* aStream,
- const AudioChunk& aInput,
- AudioChunk* aOutput,
- bool* aFinished)
- {
- *aOutput = aInput;
- }
-};
-
-}
-
-#endif /* MOZILLA_AUDIONODEENGINE_H_ */
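The engine interface being removed here is a pull model: the stream asks the engine for one 128-sample block at a time on the graph thread, and parameter changes arrive through the Set*Parameter virtuals. A self-contained sketch of that shape, using stand-in types (Chunk, Engine) and a hypothetical GainEngine, not the real Web Audio engines:

#include <cstdint>
#include <vector>

const uint32_t kBlockSize = 128; // stand-in for WEBAUDIO_BLOCK_SIZE

struct Chunk {                   // stand-in for AudioChunk (one channel)
  std::vector<float> mSamples;
};

class Engine {                   // stand-in for AudioNodeEngine
public:
  virtual ~Engine() {}
  virtual void SetDoubleParameter(uint32_t aIndex, double aParam) {}
  // Default behavior mirrors the base class: pass the input through.
  virtual void ProduceAudioBlock(const Chunk& aInput, Chunk* aOutput,
                                 bool* aFinished)
  {
    *aOutput = aInput;
  }
};

// Hypothetical engine applying a gain, the way a gain node's engine might.
class GainEngine : public Engine {
public:
  GainEngine() : mGain(1.0) {}
  virtual void SetDoubleParameter(uint32_t aIndex, double aParam)
  {
    mGain = aParam; // reached via a control message on the graph thread
  }
  virtual void ProduceAudioBlock(const Chunk& aInput, Chunk* aOutput,
                                 bool* aFinished)
  {
    aOutput->mSamples.resize(aInput.mSamples.size());
    for (size_t i = 0; i < aInput.mSamples.size(); ++i) {
      aOutput->mSamples[i] = float(aInput.mSamples[i]*mGain);
    }
  }
private:
  double mGain;
};

int main()
{
  Chunk in, out;
  in.mSamples.assign(kBlockSize, 1.0f);
  bool finished = false;
  GainEngine engine;
  engine.SetDoubleParameter(0, 0.5);
  engine.ProduceAudioBlock(in, &out, &finished); // out.mSamples[i] == 0.5f
  return 0;
}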
diff --git a/content/media/AudioNodeStream.cpp b/content/media/AudioNodeStream.cpp
deleted file mode 100644
index 8789d28c5eac..000000000000
--- a/content/media/AudioNodeStream.cpp
+++ /dev/null
@@ -1,270 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "AudioNodeStream.h"
-
-#include "MediaStreamGraphImpl.h"
-#include "AudioNodeEngine.h"
-
-using namespace mozilla::dom;
-
-namespace mozilla {
-
-/**
- * An AudioNodeStream produces a single audio track with ID
- * AUDIO_NODE_STREAM_TRACK_ID. This track has rate IdealAudioRate().
- * Each chunk in the track is a single block of WEBAUDIO_BLOCK_SIZE samples.
- */
-static const int AUDIO_NODE_STREAM_TRACK_ID = 1;
-
-AudioNodeStream::~AudioNodeStream()
-{
-}
-
-void
-AudioNodeStream::SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
- double aStreamTime)
-{
- class Message : public ControlMessage {
- public:
- Message(AudioNodeStream* aStream, uint32_t aIndex, MediaStream* aRelativeToStream,
- double aStreamTime)
- : ControlMessage(aStream), mStreamTime(aStreamTime),
- mRelativeToStream(aRelativeToStream), mIndex(aIndex) {}
- virtual void Run()
- {
- static_cast<AudioNodeStream*>(mStream)->
- SetStreamTimeParameterImpl(mIndex, mRelativeToStream, mStreamTime);
- }
- double mStreamTime;
- MediaStream* mRelativeToStream;
- uint32_t mIndex;
- };
-
- MOZ_ASSERT(this);
- GraphImpl()->AppendMessage(new Message(this, aIndex, aRelativeToStream, aStreamTime));
-}
-
-void
-AudioNodeStream::SetStreamTimeParameterImpl(uint32_t aIndex, MediaStream* aRelativeToStream,
- double aStreamTime)
-{
- StreamTime streamTime = std::max<MediaTime>(0, SecondsToMediaTime(aStreamTime));
- GraphTime graphTime = aRelativeToStream->StreamTimeToGraphTime(streamTime);
- StreamTime thisStreamTime = GraphTimeToStreamTimeOptimistic(graphTime);
- TrackTicks ticks = TimeToTicksRoundDown(IdealAudioRate(), thisStreamTime);
- mEngine->SetStreamTimeParameter(aIndex, ticks);
-}
-
-void
-AudioNodeStream::SetDoubleParameter(uint32_t aIndex, double aValue)
-{
- class Message : public ControlMessage {
- public:
- Message(AudioNodeStream* aStream, uint32_t aIndex, double aValue)
- : ControlMessage(aStream), mValue(aValue), mIndex(aIndex) {}
- virtual void Run()
- {
- static_cast<AudioNodeStream*>(mStream)->Engine()->
- SetDoubleParameter(mIndex, mValue);
- }
- double mValue;
- uint32_t mIndex;
- };
-
- MOZ_ASSERT(this);
- GraphImpl()->AppendMessage(new Message(this, aIndex, aValue));
-}
-
-void
-AudioNodeStream::SetInt32Parameter(uint32_t aIndex, int32_t aValue)
-{
- class Message : public ControlMessage {
- public:
- Message(AudioNodeStream* aStream, uint32_t aIndex, int32_t aValue)
- : ControlMessage(aStream), mValue(aValue), mIndex(aIndex) {}
- virtual void Run()
- {
- static_cast<AudioNodeStream*>(mStream)->Engine()->
- SetInt32Parameter(mIndex, mValue);
- }
- int32_t mValue;
- uint32_t mIndex;
- };
-
- MOZ_ASSERT(this);
- GraphImpl()->AppendMessage(new Message(this, aIndex, aValue));
-}
-
-void
-AudioNodeStream::SetBuffer(already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer)
-{
- class Message : public ControlMessage {
- public:
- Message(AudioNodeStream* aStream,
- already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer)
- : ControlMessage(aStream), mBuffer(aBuffer) {}
- virtual void Run()
- {
- static_cast<AudioNodeStream*>(mStream)->Engine()->
- SetBuffer(mBuffer.forget());
- }
- nsRefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
- };
-
- MOZ_ASSERT(this);
- GraphImpl()->AppendMessage(new Message(this, aBuffer));
-}
-
-StreamBuffer::Track*
-AudioNodeStream::EnsureTrack()
-{
- StreamBuffer::Track* track = mBuffer.FindTrack(AUDIO_NODE_STREAM_TRACK_ID);
- if (!track) {
- nsAutoPtr<AudioSegment> segment(new AudioSegment());
- for (uint32_t j = 0; j < mListeners.Length(); ++j) {
- MediaStreamListener* l = mListeners[j];
- l->NotifyQueuedTrackChanges(Graph(), AUDIO_NODE_STREAM_TRACK_ID, IdealAudioRate(), 0,
- MediaStreamListener::TRACK_EVENT_CREATED,
- *segment);
- }
- track = &mBuffer.AddTrack(AUDIO_NODE_STREAM_TRACK_ID, IdealAudioRate(), 0, segment.forget());
- }
- return track;
-}
-
-AudioChunk*
-AudioNodeStream::ObtainInputBlock(AudioChunk* aTmpChunk)
-{
- uint32_t inputCount = mInputs.Length();
- uint32_t outputChannelCount = 0;
- nsAutoTArray<AudioChunk*,250> inputChunks;
- for (uint32_t i = 0; i < inputCount; ++i) {
- MediaStream* s = mInputs[i]->GetSource();
- AudioNodeStream* a = s->AsAudioNodeStream();
- MOZ_ASSERT(a);
- if (a->IsFinishedOnGraphThread()) {
- continue;
- }
- AudioChunk* chunk = a->mLastChunk;
- // XXX when we implement DelayNode, this will no longer be true and we'll
- // need to treat a null chunk (when the DelayNode hasn't had a chance
- // to produce data yet) as silence here.
- MOZ_ASSERT(chunk);
- if (chunk->IsNull()) {
- continue;
- }
-
- inputChunks.AppendElement(chunk);
- outputChannelCount =
- GetAudioChannelsSuperset(outputChannelCount, chunk->mChannelData.Length());
- }
-
- uint32_t inputChunkCount = inputChunks.Length();
- if (inputChunkCount == 0) {
- aTmpChunk->SetNull(WEBAUDIO_BLOCK_SIZE);
- return aTmpChunk;
- }
-
- if (inputChunkCount == 1) {
- return inputChunks[0];
- }
-
- AllocateAudioBlock(outputChannelCount, aTmpChunk);
-
- for (uint32_t i = 0; i < inputChunkCount; ++i) {
- AudioChunk* chunk = inputChunks[i];
- nsAutoTArray<const void*,GUESS_AUDIO_CHANNELS> channels;
- channels.AppendElements(chunk->mChannelData);
- if (channels.Length() < outputChannelCount) {
- AudioChannelsUpMix(&channels, outputChannelCount, nullptr);
- NS_ASSERTION(outputChannelCount == channels.Length(),
- "We called GetAudioChannelsSuperset to avoid this");
- }
-
- for (uint32_t c = 0; c < channels.Length(); ++c) {
- const float* inputData = static_cast<const float*>(channels[c]);
- float* outputData = static_cast<float*>(const_cast<void*>(aTmpChunk->mChannelData[c]));
- if (inputData) {
- if (i == 0) {
- AudioBlockCopyChannelWithScale(inputData, chunk->mVolume, outputData);
- } else {
- AudioBlockAddChannelWithScale(inputData, chunk->mVolume, outputData);
- }
- } else {
- if (i == 0) {
- memset(outputData, 0, WEBAUDIO_BLOCK_SIZE*sizeof(float));
- }
- }
- }
- }
-
- return aTmpChunk;
-}
-
-// The MediaStreamGraph guarantees that this is actually one block, for
-// AudioNodeStreams.
-void
-AudioNodeStream::ProduceOutput(GraphTime aFrom, GraphTime aTo)
-{
- StreamBuffer::Track* track = EnsureTrack();
-
- AudioChunk outputChunk;
- AudioSegment* segment = track->Get<AudioSegment>();
-
- if (mInCycle) {
- // XXX DelayNode not supported yet so just produce silence
- outputChunk.SetNull(WEBAUDIO_BLOCK_SIZE);
- } else {
- AudioChunk tmpChunk;
- AudioChunk* inputChunk = ObtainInputBlock(&tmpChunk);
- bool finished = false;
- mEngine->ProduceAudioBlock(this, *inputChunk, &outputChunk, &finished);
- if (finished) {
- FinishOutput();
- }
- }
-
- mLastChunk = segment->AppendAndConsumeChunk(&outputChunk);
-
- for (uint32_t j = 0; j < mListeners.Length(); ++j) {
- MediaStreamListener* l = mListeners[j];
- AudioChunk copyChunk = *mLastChunk;
- AudioSegment tmpSegment;
- tmpSegment.AppendAndConsumeChunk(&copyChunk);
- l->NotifyQueuedTrackChanges(Graph(), AUDIO_NODE_STREAM_TRACK_ID,
- IdealAudioRate(), segment->GetDuration(), 0,
- tmpSegment);
- }
-}
-
-TrackTicks
-AudioNodeStream::GetCurrentPosition()
-{
- return EnsureTrack()->Get<AudioSegment>()->GetDuration();
-}
-
-void
-AudioNodeStream::FinishOutput()
-{
- if (IsFinishedOnGraphThread()) {
- return;
- }
-
- StreamBuffer::Track* track = EnsureTrack();
- track->SetEnded();
- FinishOnGraphThread();
-
- for (uint32_t j = 0; j < mListeners.Length(); ++j) {
- MediaStreamListener* l = mListeners[j];
- AudioSegment emptySegment;
- l->NotifyQueuedTrackChanges(Graph(), AUDIO_NODE_STREAM_TRACK_ID,
- IdealAudioRate(),
- track->GetSegment()->GetDuration(),
- MediaStreamListener::TRACK_EVENT_ENDED, emptySegment);
- }
-}
-
-}
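Every Set*Parameter method in the deleted file follows the same pattern: wrap the change in a ControlMessage subclass, append it from the main thread, and let the graph thread run it. A simplified standalone model of that handoff, with std::mutex and std::queue standing in for the graph's batched message queue (the real graph drains messages in per-stable-state batches rather than immediately):

#include <memory>
#include <mutex>
#include <queue>

class Message {                       // stand-in for ControlMessage
public:
  virtual ~Message() {}
  virtual void Run() = 0;             // executed on the graph thread
};

class Graph {                         // stand-in for MediaStreamGraphImpl
public:
  void AppendMessage(std::unique_ptr<Message> aMessage) // main thread
  {
    std::lock_guard<std::mutex> lock(mMutex);
    mQueue.push(std::move(aMessage));
  }
  void RunIteration()                 // graph thread
  {
    std::lock_guard<std::mutex> lock(mMutex);
    while (!mQueue.empty()) {
      mQueue.front()->Run();
      mQueue.pop();
    }
  }
private:
  std::mutex mMutex;
  std::queue<std::unique_ptr<Message> > mQueue;
};

double gGain = 1.0;                   // pretend graph-thread-only state

class SetGainMessage : public Message {
public:
  explicit SetGainMessage(double aValue) : mValue(aValue) {}
  virtual void Run() { gGain = mValue; }
private:
  double mValue;
};

int main()
{
  Graph graph;
  graph.AppendMessage(std::unique_ptr<Message>(new SetGainMessage(0.5)));
  graph.RunIteration();               // gGain becomes 0.5 on the "graph thread"
  return 0;
}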
diff --git a/content/media/AudioNodeStream.h b/content/media/AudioNodeStream.h
deleted file mode 100644
index 3eb25635d2b8..000000000000
--- a/content/media/AudioNodeStream.h
+++ /dev/null
@@ -1,82 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MOZILLA_AUDIONODESTREAM_H_
-#define MOZILLA_AUDIONODESTREAM_H_
-
-#include "MediaStreamGraph.h"
-#include "AudioChannelFormat.h"
-#include "AudioNodeEngine.h"
-
-#ifdef PR_LOGGING
-#define LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
-#else
-#define LOG(type, msg)
-#endif
-
-namespace mozilla {
-
-class ThreadSharedFloatArrayBufferList;
-
-/**
- * An AudioNodeStream produces one audio track with ID AUDIO_TRACK.
- * The start time of the AudioTrack is aligned to the start time of the
- * AudioContext's destination node stream, plus some multiple of BLOCK_SIZE
- * samples.
- *
- * An AudioNodeStream has an AudioNodeEngine plugged into it that does the
- * actual audio processing. AudioNodeStream contains the glue code that
- * integrates audio processing with the MediaStreamGraph.
- */
-class AudioNodeStream : public ProcessedMediaStream {
-public:
- enum { AUDIO_TRACK = 1 };
-
- /**
- * Transfers ownership of aEngine to the new AudioNodeStream.
- */
- explicit AudioNodeStream(AudioNodeEngine* aEngine)
- : ProcessedMediaStream(nullptr), mEngine(aEngine), mLastChunk(nullptr)
- {
- }
- ~AudioNodeStream();
-
- // Control API
- /**
- * Sets a parameter that's a time relative to some stream's played time.
- * This time is converted to a time relative to this stream when it's set.
- */
- void SetStreamTimeParameter(uint32_t aIndex, MediaStream* aRelativeToStream,
- double aStreamTime);
- void SetDoubleParameter(uint32_t aIndex, double aValue);
- void SetInt32Parameter(uint32_t aIndex, int32_t aValue);
- void SetBuffer(already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer);
-
- virtual AudioNodeStream* AsAudioNodeStream() { return this; }
-
- // Graph thread only
- void SetStreamTimeParameterImpl(uint32_t aIndex, MediaStream* aRelativeToStream,
- double aStreamTime);
- virtual void ProduceOutput(GraphTime aFrom, GraphTime aTo);
- TrackTicks GetCurrentPosition();
-
- // Any thread
- AudioNodeEngine* Engine() { return mEngine; }
-
-protected:
- void FinishOutput();
-
- StreamBuffer::Track* EnsureTrack();
- AudioChunk* ObtainInputBlock(AudioChunk* aTmpChunk);
-
- // The engine that will generate output for this node.
- nsAutoPtr<AudioNodeEngine> mEngine;
- // The last block produced by this node.
- AudioChunk* mLastChunk;
-};
-
-}
-
-#endif /* MOZILLA_AUDIONODESTREAM_H_ */
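Block alignment, as described in the deleted header comment, just means boundaries are rounded up to multiples of WEBAUDIO_BLOCK_SIZE (1 << 7 = 128) samples. A quick standalone illustration of the rounding arithmetic, with stand-in names:

#include <cstdint>
#include <cstdio>

const uint32_t kBlockSizeBits = 7;               // WEBAUDIO_BLOCK_SIZE_BITS
const uint32_t kBlockSize = 1 << kBlockSizeBits; // 128 samples

// Round a tick count up to the next block boundary, the way the graph
// advances AudioNodeStreams block by block.
int64_t RoundUpToBlock(int64_t aTicks)
{
  return ((aTicks + kBlockSize - 1) >> kBlockSizeBits) << kBlockSizeBits;
}

int main()
{
  printf("%lld\n", (long long)RoundUpToBlock(1));   // 128
  printf("%lld\n", (long long)RoundUpToBlock(128)); // 128
  printf("%lld\n", (long long)RoundUpToBlock(129)); // 256
  return 0;
}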
diff --git a/content/media/AudioSegment.cpp b/content/media/AudioSegment.cpp
index 39f8e8a67b16..468b3d865944 100644
--- a/content/media/AudioSegment.cpp
+++ b/content/media/AudioSegment.cpp
@@ -89,6 +89,7 @@ AudioSegment::ApplyVolume(float aVolume)
}
static const int AUDIO_PROCESSING_FRAMES = 640; /* > 10ms of 48KHz audio */
+static const int GUESS_AUDIO_CHANNELS = 2;
static const uint8_t gZeroChannel[MAX_AUDIO_SAMPLE_SIZE*AUDIO_PROCESSING_FRAMES] = {0};
void
diff --git a/content/media/AudioSegment.h b/content/media/AudioSegment.h
index 757136c85416..3197cf162e22 100644
--- a/content/media/AudioSegment.h
+++ b/content/media/AudioSegment.h
@@ -15,11 +15,6 @@ namespace mozilla {
class AudioStream;
-/**
- * For auto-arrays etc, guess this as the common number of channels.
- */
-const int GUESS_AUDIO_CHANNELS = 2;
-
/**
* An AudioChunk represents a multi-channel buffer of audio samples.
* It references an underlying ThreadSharedObject which manages the lifetime
diff --git a/content/media/Makefile.in b/content/media/Makefile.in
index b9f01fe03b01..3edba24d0014 100644
--- a/content/media/Makefile.in
+++ b/content/media/Makefile.in
@@ -19,8 +19,6 @@ endif # !_MSC_VER
EXPORTS = \
AbstractMediaDecoder.h \
AudioChannelFormat.h \
- AudioNodeEngine.h \
- AudioNodeStream.h \
AudioSampleFormat.h \
AudioSegment.h \
BufferMediaResource.h \
@@ -48,8 +46,6 @@ EXPORTS = \
CPPSRCS = \
AudioChannelFormat.cpp \
- AudioNodeEngine.cpp \
- AudioNodeStream.cpp \
AudioSegment.cpp \
DecoderTraits.cpp \
FileBlockCache.cpp \
diff --git a/content/media/MediaDecoder.cpp b/content/media/MediaDecoder.cpp
index 0010d8314b30..b2e453b89525 100644
--- a/content/media/MediaDecoder.cpp
+++ b/content/media/MediaDecoder.cpp
@@ -157,36 +157,30 @@ void MediaDecoder::ConnectDecodedStreamToOutputStream(OutputStreamData* aStream)
}
MediaDecoder::DecodedStreamData::DecodedStreamData(MediaDecoder* aDecoder,
- int64_t aInitialTime,
- SourceMediaStream* aStream)
+ int64_t aInitialTime,
+ SourceMediaStream* aStream)
: mLastAudioPacketTime(-1),
mLastAudioPacketEndTime(-1),
mAudioFramesWritten(0),
mInitialTime(aInitialTime),
mNextVideoTime(aInitialTime),
- mDecoder(aDecoder),
mStreamInitialized(false),
mHaveSentFinish(false),
mHaveSentFinishAudio(false),
mHaveSentFinishVideo(false),
mStream(aStream),
+ mMainThreadListener(new DecodedStreamMainThreadListener(aDecoder)),
mHaveBlockedForPlayState(false)
{
- mStream->AddMainThreadListener(this);
+ mStream->AddMainThreadListener(mMainThreadListener);
}
MediaDecoder::DecodedStreamData::~DecodedStreamData()
{
- mStream->RemoveMainThreadListener(this);
+ mStream->RemoveMainThreadListener(mMainThreadListener);
mStream->Destroy();
}
-void
-MediaDecoder::DecodedStreamData::NotifyMainThreadStateChanged()
-{
- mDecoder->NotifyDecodedStreamMainThreadStateChanged();
-}
-
void MediaDecoder::DestroyDecodedStream()
{
MOZ_ASSERT(NS_IsMainThread());
diff --git a/content/media/MediaDecoder.h b/content/media/MediaDecoder.h
index 0466ef20b475..f545feaadfd3 100644
--- a/content/media/MediaDecoder.h
+++ b/content/media/MediaDecoder.h
@@ -235,6 +235,7 @@ class MediaDecoder : public nsIObserver,
{
public:
typedef mozilla::layers::Image Image;
+ class DecodedStreamMainThreadListener;
NS_DECL_ISUPPORTS
NS_DECL_NSIOBSERVER
@@ -339,7 +340,7 @@ public:
// replaying after the input as ended. In the latter case, the new source is
// not connected to streams created by captureStreamUntilEnded.
- struct DecodedStreamData MOZ_FINAL : public MainThreadMediaStreamListener {
+ struct DecodedStreamData {
DecodedStreamData(MediaDecoder* aDecoder,
int64_t aInitialTime, SourceMediaStream* aStream);
~DecodedStreamData();
@@ -357,7 +358,6 @@ public:
// Therefore video packets starting at or after this time need to be copied
// to the output stream.
int64_t mNextVideoTime; // microseconds
- MediaDecoder* mDecoder;
// The last video image sent to the stream. Useful if we need to replicate
// the image.
nsRefPtr<Image> mLastVideoImage;
@@ -372,11 +372,12 @@ public:
// The decoder is responsible for calling Destroy() on this stream.
// Can be read from any thread.
const nsRefPtr<SourceMediaStream> mStream;
+ // A listener object that receives notifications when mStream's
+ // main-thread-visible state changes. Used on the main thread only.
+ const nsRefPtr<DecodedStreamMainThreadListener> mMainThreadListener;
// True when we've explicitly blocked this stream because we're
// not in PLAY_STATE_PLAYING. Used on the main thread only.
bool mHaveBlockedForPlayState;
-
- virtual void NotifyMainThreadStateChanged() MOZ_OVERRIDE;
};
struct OutputStreamData {
void Init(ProcessedMediaStream* aStream, bool aFinishWhenEnded)
@@ -420,6 +421,16 @@ public:
GetReentrantMonitor().AssertCurrentThreadIn();
return mDecodedStream;
}
+ class DecodedStreamMainThreadListener : public MainThreadMediaStreamListener {
+ public:
+ DecodedStreamMainThreadListener(MediaDecoder* aDecoder)
+ : mDecoder(aDecoder) {}
+ virtual void NotifyMainThreadStateChanged()
+ {
+ mDecoder->NotifyDecodedStreamMainThreadStateChanged();
+ }
+ MediaDecoder* mDecoder;
+ };
// Add an output stream. All decoder output will be sent to the stream.
// The stream is initially blocked. The decoder is responsible for unblocking
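The MediaDecoder change replaces inheritance with composition: DecodedStreamData no longer is a MainThreadMediaStreamListener; instead, a small refcounted listener object holds a back-pointer to the decoder and forwards the notification. A minimal standalone model of that refactor, with std::shared_ptr standing in for nsRefPtr and stand-in class names:

#include <cstdio>
#include <memory>

class Listener {      // stand-in for MainThreadMediaStreamListener
public:
  virtual ~Listener() {}
  virtual void NotifyMainThreadStateChanged() = 0;
};

class Decoder {       // stand-in for MediaDecoder
public:
  void NotifyDecodedStreamMainThreadStateChanged()
  {
    printf("decoder notified\n");
  }
};

// The forwarding object the patch introduces: separately refcountable,
// so the stream can hold a strong reference to it rather than a raw
// pointer into DecodedStreamData.
class DecodedStreamListener : public Listener {
public:
  explicit DecodedStreamListener(Decoder* aDecoder) : mDecoder(aDecoder) {}
  virtual void NotifyMainThreadStateChanged()
  {
    mDecoder->NotifyDecodedStreamMainThreadStateChanged();
  }
private:
  Decoder* mDecoder;
};

int main()
{
  Decoder decoder;
  std::shared_ptr<Listener> listener(new DecodedStreamListener(&decoder));
  listener->NotifyMainThreadStateChanged(); // what the stream would invoke
  return 0;
}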
diff --git a/content/media/MediaStreamGraph.cpp b/content/media/MediaStreamGraph.cpp
index c0a0342a486e..7e9de4d91209 100644
--- a/content/media/MediaStreamGraph.cpp
+++ b/content/media/MediaStreamGraph.cpp
@@ -3,8 +3,10 @@
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
-#include "MediaStreamGraphImpl.h"
+#include "MediaStreamGraph.h"
+#include "mozilla/Monitor.h"
+#include "mozilla/TimeStamp.h"
#include "AudioSegment.h"
#include "VideoSegment.h"
#include "nsContentUtils.h"
@@ -19,8 +21,6 @@
#include "TrackUnionStream.h"
#include "ImageContainer.h"
#include "AudioChannelCommon.h"
-#include "AudioNodeEngine.h"
-#include "AudioNodeStream.h"
#include <algorithm>
using namespace mozilla::layers;
@@ -30,8 +30,486 @@ namespace mozilla {
#ifdef PR_LOGGING
PRLogModuleInfo* gMediaStreamGraphLog;
+#define LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
+#else
+#define LOG(type, msg)
#endif
+namespace {
+
+/**
+ * Assume we can run an iteration of the MediaStreamGraph loop in this much time
+ * or less.
+ * We try to run the control loop at this rate.
+ */
+const int MEDIA_GRAPH_TARGET_PERIOD_MS = 10;
+
+/**
+ * Assume that we might miss our scheduled wakeup of the MediaStreamGraph by
+ * this much.
+ */
+const int SCHEDULE_SAFETY_MARGIN_MS = 10;
+
+/**
+ * Try have this much audio buffered in streams and queued to the hardware.
+ * The maximum delay to the end of the next control loop
+ * is 2*MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS.
+ * There is no point in buffering more audio than this in a stream at any
+ * given time (until we add processing).
+ * This is not optimal yet.
+ */
+const int AUDIO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+ SCHEDULE_SAFETY_MARGIN_MS;
+
+/**
+ * Try to have this much video buffered. Video frames are set
+ * near the end of the iteration of the control loop. The maximum delay
+ * to the setting of the next video frame is 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+ * SCHEDULE_SAFETY_MARGIN_MS. This is not optimal yet.
+ */
+const int VIDEO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
+ SCHEDULE_SAFETY_MARGIN_MS;
+
+/**
+ * A per-stream update message passed from the media graph thread to the
+ * main thread.
+ */
+struct StreamUpdate {
+ int64_t mGraphUpdateIndex;
+ nsRefPtr<MediaStream> mStream;
+ StreamTime mNextMainThreadCurrentTime;
+ bool mNextMainThreadFinished;
+};
+
+/**
+ * This represents a message passed from the main thread to the graph thread.
+ * A ControlMessage always references a particular affected stream.
+ */
+class ControlMessage {
+public:
+ ControlMessage(MediaStream* aStream) : mStream(aStream)
+ {
+ MOZ_COUNT_CTOR(ControlMessage);
+ }
+ // All these run on the graph thread
+ virtual ~ControlMessage()
+ {
+ MOZ_COUNT_DTOR(ControlMessage);
+ }
+ // Do the action of this message on the MediaStreamGraph thread. Any actions
+ // affecting graph processing should take effect at mStateComputedTime.
+ // All stream data for times < mStateComputedTime has already been
+ // computed.
+ virtual void Run() = 0;
+ // When we're shutting down the application, most messages are ignored but
+ // some cleanup messages should still be processed (on the main thread).
+ virtual void RunDuringShutdown() {}
+ MediaStream* GetStream() { return mStream; }
+
+protected:
+ // We do not hold a reference to mStream. The graph will be holding
+ // a reference to the stream until the Destroy message is processed. The
+ // last message referencing a stream is the Destroy message for that stream.
+ MediaStream* mStream;
+};
+
+}
+
+/**
+ * The implementation of a media stream graph. This class is private to this
+ * file. It's not in the anonymous namespace because MediaStream needs to
+ * be able to friend it.
+ *
+ * Currently we only have one per process.
+ */
+class MediaStreamGraphImpl : public MediaStreamGraph {
+public:
+ MediaStreamGraphImpl();
+ ~MediaStreamGraphImpl()
+ {
+ NS_ASSERTION(IsEmpty(),
+ "All streams should have been destroyed by messages from the main thread");
+ LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
+ }
+
+ // Main thread only.
+ /**
+ * This runs every time we need to sync state from the media graph thread
+ * to the main thread while the main thread is not in the middle
+ * of a script. It runs during a "stable state" (per HTML5) or during
+ * an event posted to the main thread.
+ */
+ void RunInStableState();
+ /**
+ * Ensure a runnable to run RunInStableState is posted to the appshell to
+ * run at the next stable state (per HTML5).
+ * See EnsureStableStateEventPosted.
+ */
+ void EnsureRunInStableState();
+ /**
+ * Called to apply a StreamUpdate to its stream.
+ */
+ void ApplyStreamUpdate(StreamUpdate* aUpdate);
+ /**
+ * Append a ControlMessage to the message queue. This queue is drained
+ * during RunInStableState; the messages will run on the graph thread.
+ */
+ void AppendMessage(ControlMessage* aMessage);
+ /**
+ * Make this MediaStreamGraph enter forced-shutdown state. This state
+ * will be noticed by the media graph thread, which will shut down all streams
+ * and other state controlled by the media graph thread.
+ * This is called during application shutdown.
+ */
+ void ForceShutDown();
+ /**
+ * Shutdown() this MediaStreamGraph's threads and return when they've shut down.
+ */
+ void ShutdownThreads();
+
+ // The following methods run on the graph thread (or possibly the main thread if
+ // mLifecycleState > LIFECYCLE_RUNNING)
+ /**
+ * Runs main control loop on the graph thread. Normally a single invocation
+ * of this runs for the entire lifetime of the graph thread.
+ */
+ void RunThread();
+ /**
+ * Call this to indicate that another iteration of the control loop is
+ * required on its regular schedule. The monitor must not be held.
+ */
+ void EnsureNextIteration();
+ /**
+ * As above, but with the monitor already held.
+ */
+ void EnsureNextIterationLocked(MonitorAutoLock& aLock);
+ /**
+ * Call this to indicate that another iteration of the control loop is
+ * required immediately. The monitor must already be held.
+ */
+ void EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock);
+ /**
+ * Ensure there is an event posted to the main thread to run RunInStableState.
+ * mMonitor must be held.
+ * See EnsureRunInStableState
+ */
+ void EnsureStableStateEventPosted();
+ /**
+ * Generate messages to the main thread to update it for all state changes.
+ * mMonitor must be held.
+ */
+ void PrepareUpdatesToMainThreadState();
+ // The following methods are the various stages of RunThread processing.
+ /**
+ * Compute a new current time for the graph and advance all on-graph-thread
+ * state to the new current time.
+ */
+ void UpdateCurrentTime();
+ /**
+ * Update the consumption state of aStream to reflect whether its data
+ * is needed or not.
+ */
+ void UpdateConsumptionState(SourceMediaStream* aStream);
+ /**
+ * Extract any state updates pending in aStream, and apply them.
+ */
+ void ExtractPendingInput(SourceMediaStream* aStream,
+ GraphTime aDesiredUpToTime,
+ bool* aEnsureNextIteration);
+ /**
+ * Update "have enough data" flags in aStream.
+ */
+ void UpdateBufferSufficiencyState(SourceMediaStream* aStream);
+ /*
+ * If aStream hasn't already been ordered, push it onto aStack and order
+ * its children.
+ */
+ void UpdateStreamOrderForStream(nsTArray<MediaStream*>* aStack,
+ already_AddRefed<MediaStream> aStream);
+ /**
+ * Mark aStream and all its inputs (recursively) as consumed.
+ */
+ static void MarkConsumed(MediaStream* aStream);
+ /**
+ * Sort mStreams so that every stream not in a cycle is after any streams
+ * it depends on, and every stream in a cycle is marked as being in a cycle.
+ * Also sets mIsConsumed on every stream.
+ */
+ void UpdateStreamOrder();
+ /**
+ * Compute the blocking states of streams from mStateComputedTime
+ * until the desired future time aEndBlockingDecisions.
+ * Updates mStateComputedTime and sets MediaStream::mBlocked
+ * for all streams.
+ */
+ void RecomputeBlocking(GraphTime aEndBlockingDecisions);
+ // The following methods are used to help RecomputeBlocking.
+ /**
+ * If aStream isn't already in aStreams, add it and recursively call
+ * AddBlockingRelatedStreamsToSet on all the streams whose blocking
+ * status could depend on or affect the state of aStream.
+ */
+ void AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
+ MediaStream* aStream);
+ /**
+ * Mark a stream blocked at time aTime. If this results in decisions that need
+ * to be revisited at some point in the future, *aEnd will be reduced to the
+ * first time in the future to recompute those decisions.
+ */
+ void MarkStreamBlocking(MediaStream* aStream);
+ /**
+ * Recompute blocking for the streams in aStreams for the interval starting at aTime.
+ * If this results in decisions that need to be revisited at some point
+ * in the future, *aEnd will be reduced to the first time in the future to
+ * recompute those decisions.
+ */
+ void RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
+ GraphTime aTime, GraphTime aEndBlockingDecisions,
+ GraphTime* aEnd);
+ /**
+ * Returns true if aStream will underrun at aTime for its own playback.
+ * aEndBlockingDecisions is when we plan to stop making blocking decisions.
+ * *aEnd will be reduced to the first time in the future to recompute these
+ * decisions.
+ */
+ bool WillUnderrun(MediaStream* aStream, GraphTime aTime,
+ GraphTime aEndBlockingDecisions, GraphTime* aEnd);
+ /**
+ * Given a graph time aTime, convert it to a stream time taking into
+ * account the time during which aStream is scheduled to be blocked.
+ */
+ StreamTime GraphTimeToStreamTime(MediaStream* aStream, GraphTime aTime);
+ enum {
+ INCLUDE_TRAILING_BLOCKED_INTERVAL = 0x01
+ };
+ /**
+ * Given a stream time aTime, convert it to a graph time taking into
+ * account the time during which aStream is scheduled to be blocked.
+ * aTime must be <= mStateComputedTime since blocking decisions
+ * are only known up to that point.
+ * If aTime is exactly at the start of a blocked interval, then the blocked
+ * interval is included in the time returned if and only if
+ * aFlags includes INCLUDE_TRAILING_BLOCKED_INTERVAL.
+ */
+ GraphTime StreamTimeToGraphTime(MediaStream* aStream, StreamTime aTime,
+ uint32_t aFlags = 0);
+ /**
+ * Get the current audio position of the stream's audio output.
+ */
+ GraphTime GetAudioPosition(MediaStream* aStream);
+ /**
+ * Call NotifyHaveCurrentData on aStream's listeners.
+ */
+ void NotifyHasCurrentData(MediaStream* aStream);
+ /**
+ * If aStream needs an audio stream but doesn't have one, create it.
+ * If aStream doesn't need an audio stream but has one, destroy it.
+ */
+ void CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
+ MediaStream* aStream);
+ /**
+ * Queue audio (mix of stream audio and silence for blocked intervals)
+ * to the audio output stream.
+ */
+ void PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
+ /**
+ * Set the correct current video frame for stream aStream.
+ */
+ void PlayVideo(MediaStream* aStream);
+ /**
+ * No more data will be forthcoming for aStream. The stream will end
+ * at the current buffer end point. The StreamBuffer's tracks must be
+ * explicitly set to finished by the caller.
+ */
+ void FinishStream(MediaStream* aStream);
+ /**
+ * Compute how much stream data we would like to buffer for aStream.
+ */
+ StreamTime GetDesiredBufferEnd(MediaStream* aStream);
+ /**
+ * Returns true when there are no active streams.
+ */
+ bool IsEmpty() { return mStreams.IsEmpty() && mPortCount == 0; }
+
+ // For use by control messages
+ /**
+ * Identify which graph update index we are currently processing.
+ */
+ int64_t GetProcessingGraphUpdateIndex() { return mProcessingGraphUpdateIndex; }
+ /**
+ * Add aStream to the graph and initializes its graph-specific state.
+ */
+ void AddStream(MediaStream* aStream);
+ /**
+ * Remove aStream from the graph. Ensures that pending messages about the
+ * stream back to the main thread are flushed.
+ */
+ void RemoveStream(MediaStream* aStream);
+ /**
+ * Remove aPort from the graph and release it.
+ */
+ void DestroyPort(MediaInputPort* aPort);
+
+ // Data members
+
+ /**
+ * Media graph thread.
+ * Readonly after initialization on the main thread.
+ */
+ nsCOMPtr<nsIThread> mThread;
+
+ // The following state is managed on the graph thread only, unless
+ // mLifecycleState > LIFECYCLE_RUNNING in which case the graph thread
+ // is not running and this state can be used from the main thread.
+
+ nsTArray<nsRefPtr<MediaStream> > mStreams;
+ /**
+ * The current graph time for the current iteration of the RunThread control
+ * loop.
+ */
+ GraphTime mCurrentTime;
+ /**
+ * Blocking decisions and all stream contents have been computed up to this
+ * time. The next batch of updates from the main thread will be processed
+ * at this time. Always >= mCurrentTime.
+ */
+ GraphTime mStateComputedTime;
+ /**
+ * This is only used for logging.
+ */
+ TimeStamp mInitialTimeStamp;
+ /**
+ * The real timestamp of the latest run of UpdateCurrentTime.
+ */
+ TimeStamp mCurrentTimeStamp;
+ /**
+ * Which update batch we are currently processing.
+ */
+ int64_t mProcessingGraphUpdateIndex;
+ /**
+ * Number of active MediaInputPorts
+ */
+ int32_t mPortCount;
+
+ // mMonitor guards the data below.
+ // MediaStreamGraph normally does its work without holding mMonitor, so it is
+ // not safe to just grab mMonitor from some thread and start monkeying with
+ // the graph. Instead, communicate with the graph thread using provided
+ // mechanisms such as the ControlMessage queue.
+ Monitor mMonitor;
+
+ // Data guarded by mMonitor (must always be accessed with mMonitor held,
+ // regardless of the value of mLifecycleState.
+
+ /**
+ * State to copy to main thread
+ */
+ nsTArray<StreamUpdate> mStreamUpdates;
+ /**
+ * Runnables to run after the next update to main thread state.
+ */
+ nsTArray<nsCOMPtr<nsIRunnable> > mUpdateRunnables;
+ struct MessageBlock {
+ int64_t mGraphUpdateIndex;
+ nsTArray<nsAutoPtr<ControlMessage> > mMessages;
+ };
+ /**
+ * A list of batches of messages to process. Each batch is processed
+ * as an atomic unit.
+ */
+ nsTArray<MessageBlock> mMessageQueue;
+ /**
+ * This enum specifies where this graph is in its lifecycle. This is used
+ * to control shutdown.
+ * Shutdown is tricky because it can happen in two different ways:
+ * 1) Shutdown due to inactivity. RunThread() detects that it has no
+ * pending messages and no streams, and exits. The next RunInStableState()
+ * checks if there are new pending messages from the main thread (true only
+ * if new stream creation raced with shutdown); if there are, it revives
+ * RunThread(), otherwise it commits to shutting down the graph. New stream
+ * creation after this point will create a new graph. An async event is
+ * dispatched to Shutdown() the graph's threads and then delete the graph
+ * object.
+ * 2) Forced shutdown at application shutdown. A flag is set, RunThread()
+ * detects the flag and exits, the next RunInStableState() detects the flag,
+ * and dispatches the async event to Shutdown() the graph's threads. However
+ * the graph object is not deleted. New messages for the graph are processed
+ * synchronously on the main thread if necessary. When the last stream is
+ * destroyed, the graph object is deleted.
+ */
+ enum LifecycleState {
+ // The graph thread hasn't started yet.
+ LIFECYCLE_THREAD_NOT_STARTED,
+ // RunThread() is running normally.
+ LIFECYCLE_RUNNING,
+ // In the following states, the graph thread is not running so
+ // all "graph thread only" state in this class can be used safely
+ // on the main thread.
+ // RunThread() has exited and we're waiting for the next
+ // RunInStableState(), at which point we can clean up the main-thread
+ // side of the graph.
+ LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP,
+ // RunInStableState() posted a ShutdownRunnable, and we're waiting for it
+ // to shut down the graph thread(s).
+ LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN,
+ // Graph threads have shut down but we're waiting for remaining streams
+ // to be destroyed. Only happens during application shutdown since normally
+ // we'd only shut down a graph when it has no streams.
+ LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION
+ };
+ LifecycleState mLifecycleState;
+ /**
+ * This enum specifies the wait state of the graph thread.
+ */
+ enum WaitState {
+ // RunThread() is running normally
+ WAITSTATE_RUNNING,
+ // RunThread() is paused waiting for its next iteration, which will
+ // happen soon
+ WAITSTATE_WAITING_FOR_NEXT_ITERATION,
+ // RunThread() is paused indefinitely waiting for something to change
+ WAITSTATE_WAITING_INDEFINITELY,
+ // Something has signaled RunThread() to wake up immediately,
+ // but it hasn't done so yet
+ WAITSTATE_WAKING_UP
+ };
+ WaitState mWaitState;
+ /**
+ * True when another iteration of the control loop is required.
+ */
+ bool mNeedAnotherIteration;
+ /**
+ * True when we need to do a forced shutdown during application shutdown.
+ */
+ bool mForceShutDown;
+ /**
+ * True when we have posted an event to the main thread to run
+ * RunInStableState() and the event hasn't run yet.
+ */
+ bool mPostedRunInStableStateEvent;
+
+ // Main thread only
+
+ /**
+ * Messages posted by the current event loop task. These are forwarded to
+ * the media graph thread during RunInStableState. We can't forward them
+ * immediately because we want all messages between stable states to be
+ * processed as an atomic batch.
+ */
+ nsTArray<nsAutoPtr<ControlMessage> > mCurrentTaskMessageQueue;
+ /**
+ * True when RunInStableState has determined that mLifecycleState is >
+ * LIFECYCLE_RUNNING. Since only the main thread can reset mLifecycleState to
+ * LIFECYCLE_RUNNING, this can be relied on to not change unexpectedly.
+ */
+ bool mDetectedNotRunning;
+ /**
+ * True when a stable state runner has been posted to the appshell to run
+ * RunInStableState at the next stable state.
+ */
+ bool mPostedRunInStableState;
+};
+
/**
* The singleton graph instance.
*/
@@ -226,16 +704,7 @@ MediaStreamGraphImpl::GraphTimeToStreamTime(MediaStream* aStream,
t = end;
}
return std::max<StreamTime>(0, s);
-}
-
-StreamTime
-MediaStreamGraphImpl::GraphTimeToStreamTimeOptimistic(MediaStream* aStream,
- GraphTime aTime)
-{
- GraphTime computedUpToTime = std::min(mStateComputedTime, aTime);
- StreamTime s = GraphTimeToStreamTime(aStream, computedUpToTime);
- return s + (aTime - computedUpToTime);
-}
+}
GraphTime
MediaStreamGraphImpl::StreamTimeToGraphTime(MediaStream* aStream,
@@ -303,7 +772,15 @@ MediaStreamGraphImpl::UpdateCurrentTime()
SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds()) + mCurrentTime;
if (mStateComputedTime < nextCurrentTime) {
LOG(PR_LOG_WARNING, ("Media graph global underrun detected"));
- nextCurrentTime = mStateComputedTime;
+ LOG(PR_LOG_DEBUG, ("Advancing mStateComputedTime from %f to %f",
+ MediaTimeToSeconds(mStateComputedTime),
+ MediaTimeToSeconds(nextCurrentTime)));
+ // Advance mStateComputedTime to nextCurrentTime by
+ // adding blocked time to all streams starting at mStateComputedTime
+ for (uint32_t i = 0; i < mStreams.Length(); ++i) {
+ mStreams[i]->mBlocked.SetAtAndAfter(mStateComputedTime, true);
+ }
+ mStateComputedTime = nextCurrentTime;
}
mCurrentTimeStamp = now;
@@ -876,40 +1353,6 @@ MediaStreamGraphImpl::EnsureNextIterationLocked(MonitorAutoLock& aLock)
}
}
-static GraphTime
-RoundUpToAudioBlock(GraphTime aTime)
-{
- TrackRate rate = IdealAudioRate();
- int64_t ticksAtIdealRate = (aTime*rate) >> MEDIA_TIME_FRAC_BITS;
- // Round up to nearest block boundary
- int64_t blocksAtIdealRate =
- (ticksAtIdealRate + (WEBAUDIO_BLOCK_SIZE - 1)) >>
- WEBAUDIO_BLOCK_SIZE_BITS;
- // Round up to nearest MediaTime unit
- return
- ((((blocksAtIdealRate + 1)*WEBAUDIO_BLOCK_SIZE) << MEDIA_TIME_FRAC_BITS)
- + rate - 1)/rate;
-}
-
-void
-MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
- GraphTime aFrom,
- GraphTime aTo)
-{
- GraphTime t = aFrom;
- while (t < aTo) {
- GraphTime next = RoundUpToAudioBlock(t + 1);
- for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) {
- ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream();
- if (ps) {
- ps->ProduceOutput(t, next);
- }
- }
- t = next;
- }
- NS_ASSERTION(t == aTo, "Something went wrong with rounding to block boundaries");
-}
-
void
MediaStreamGraphImpl::RunThread()
{
@@ -941,8 +1384,9 @@ MediaStreamGraphImpl::RunThread()
UpdateStreamOrder();
+ int32_t writeAudioUpTo = AUDIO_TARGET_MS;
GraphTime endBlockingDecisions =
- RoundUpToAudioBlock(mCurrentTime + MillisecondsToMediaTime(AUDIO_TARGET_MS));
+ mCurrentTime + MillisecondsToMediaTime(writeAudioUpTo);
bool ensureNextIteration = false;
// Grab pending stream input.
@@ -961,27 +1405,15 @@ MediaStreamGraphImpl::RunThread()
// Play stream contents.
uint32_t audioStreamsActive = 0;
bool allBlockedForever = true;
- // True when we've done ProduceOutput for all processed streams.
- bool doneAllProducing = false;
// Figure out what each stream wants to do
for (uint32_t i = 0; i < mStreams.Length(); ++i) {
MediaStream* stream = mStreams[i];
- if (!doneAllProducing && !stream->IsFinishedOnGraphThread()) {
- ProcessedMediaStream* ps = stream->AsProcessedStream();
- if (ps) {
- AudioNodeStream* n = stream->AsAudioNodeStream();
- if (n) {
- // Since an AudioNodeStream is present, go ahead and
- // produce audio block by block for all the rest of the streams.
- ProduceDataForStreamsBlockByBlock(i, prevComputedTime, mStateComputedTime);
- doneAllProducing = true;
- } else {
- ps->ProduceOutput(prevComputedTime, mStateComputedTime);
- NS_ASSERTION(stream->mBuffer.GetEnd() >=
- GraphTimeToStreamTime(stream, mStateComputedTime),
- "Stream did not produce enough data");
- }
- }
+ ProcessedMediaStream* ps = stream->AsProcessedStream();
+ if (ps && !ps->mFinished) {
+ ps->ProduceOutput(prevComputedTime, mStateComputedTime);
+ NS_ASSERTION(stream->mBuffer.GetEnd() >=
+ GraphTimeToStreamTime(stream, mStateComputedTime),
+ "Stream did not produce enough data");
}
NotifyHasCurrentData(stream);
CreateOrDestroyAudioStreams(prevComputedTime, stream);
@@ -1338,18 +1770,6 @@ MediaStream::GraphTimeToStreamTime(GraphTime aTime)
return GraphImpl()->GraphTimeToStreamTime(this, aTime);
}
-StreamTime
-MediaStream::GraphTimeToStreamTimeOptimistic(GraphTime aTime)
-{
- return GraphImpl()->GraphTimeToStreamTimeOptimistic(this, aTime);
-}
-
-GraphTime
-MediaStream::StreamTimeToGraphTime(StreamTime aTime)
-{
- return GraphImpl()->StreamTimeToGraphTime(this, aTime, 0);
-}
-
void
MediaStream::FinishOnGraphThread()
{
@@ -1796,11 +2216,9 @@ MediaInputPort::Graph()
return gGraph;
}
-already_AddRefed<MediaInputPort>
+MediaInputPort*
ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags)
{
- // This method creates two references to the MediaInputPort: one for
- // the main thread, and one for the MediaStreamGraph.
class Message : public ControlMessage {
public:
Message(MediaInputPort* aPort)
@@ -1809,14 +2227,13 @@ ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, uint32_t aFlags)
virtual void Run()
{
mPort->Init();
- // The graph holds its reference implicitly
- mPort.forget();
}
- nsRefPtr<MediaInputPort> mPort;
+ MediaInputPort* mPort;
};
- nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this, aFlags);
+ MediaInputPort* port = new MediaInputPort(aStream, this, aFlags);
+ NS_ADDREF(port);
GraphImpl()->AppendMessage(new Message(port));
- return port.forget();
+ return port;
}
void
@@ -1843,7 +2260,7 @@ ProcessedMediaStream::SetAutofinish(bool aAutofinish)
: ControlMessage(aStream), mAutofinish(aAutofinish) {}
virtual void Run()
{
- static_cast<ProcessedMediaStream*>(mStream)->SetAutofinishImpl(mAutofinish);
+ mStream->AsProcessedStream()->SetAutofinishImpl(mAutofinish);
}
bool mAutofinish;
};
@@ -1943,13 +2360,4 @@ MediaStreamGraph::CreateTrackUnionStream(nsDOMMediaStream* aWrapper)
return stream;
}
-AudioNodeStream*
-MediaStreamGraph::CreateAudioNodeStream(AudioNodeEngine* aEngine)
-{
- AudioNodeStream* stream = new AudioNodeStream(aEngine);
- NS_ADDREF(stream);
- static_cast<MediaStreamGraphImpl*>(this)->AppendMessage(new CreateMessage(stream));
- return stream;
-}
-
}
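The DispatchToMainThreadAfterStreamStateUpdate hunks above all make one ownership change: rather than receiving already_AddRefed and consuming a reference transferred via forget(), the method now takes a plain pointer and the nsCOMPtr array takes its own reference. Roughly, with std::shared_ptr standing in for the XPCOM smart pointers (a sketch, not the real types):

#include <memory>
#include <utility>
#include <vector>

struct Runnable {};   // stand-in for nsIRunnable

std::vector<std::shared_ptr<Runnable> > gPendingUpdateRunnables;

// After the patch: the callee takes its own reference (like appending to
// an nsTArray<nsCOMPtr<nsIRunnable> >); the caller's pointer stays valid.
void Dispatch(const std::shared_ptr<Runnable>& aRunnable)
{
  gPendingUpdateRunnables.push_back(aRunnable);
}

// Before the patch: the caller transferred its reference, roughly what
// already_AddRefed plus forget() expresses in XPCOM.
void DispatchTransferring(std::shared_ptr<Runnable> aRunnable)
{
  gPendingUpdateRunnables.push_back(std::move(aRunnable));
}

int main()
{
  std::shared_ptr<Runnable> event(new Runnable());
  Dispatch(event);                        // event still non-null afterwards
  DispatchTransferring(std::move(event)); // event is null afterwards
  return event ? 1 : 0;
}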
diff --git a/content/media/MediaStreamGraph.h b/content/media/MediaStreamGraph.h
index 18961d84e0db..b3039b6b85c6 100644
--- a/content/media/MediaStreamGraph.h
+++ b/content/media/MediaStreamGraph.h
@@ -172,16 +172,19 @@ public:
* This callback is invoked on the main thread when the main-thread-visible
* state of a stream has changed.
*
- * These methods are called with the media graph monitor held, so
- * reentry into general media graph methods is not possible.
+ * These methods are called without the media graph monitor held, so
+ * reentry into media graph methods is possible, although very much discouraged!
* You should do something non-blocking and non-reentrant (e.g. dispatch an
- * event) and return. DispatchFromMainThreadAfterNextStreamStateUpdate
- * would be a good choice.
+ * event) and return.
* The listener is allowed to synchronously remove itself from the stream, but
* not add or remove any other listeners.
*/
class MainThreadMediaStreamListener {
public:
+ virtual ~MainThreadMediaStreamListener() {}
+
+ NS_INLINE_DECL_REFCOUNTING(MainThreadMediaStreamListener)
+
virtual void NotifyMainThreadStateChanged() = 0;
};
@@ -189,9 +192,6 @@ class MediaStreamGraphImpl;
class SourceMediaStream;
class ProcessedMediaStream;
class MediaInputPort;
-class AudioNodeStream;
-class AudioNodeEngine;
-struct AudioChunk;
/**
* A stream of synchronized audio and video data. All (not blocked) streams
@@ -274,12 +274,7 @@ public:
, mMainThreadDestroyed(false)
{
}
- virtual ~MediaStream()
- {
- NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
- NS_ASSERTION(mMainThreadListeners.IsEmpty(),
- "All main thread listeners should have been removed");
- }
+ virtual ~MediaStream() {}
/**
* Returns the graph that owns this stream.
@@ -308,15 +303,11 @@ public:
// Events will be dispatched by calling methods of aListener.
void AddListener(MediaStreamListener* aListener);
void RemoveListener(MediaStreamListener* aListener);
- // Events will be dispatched by calling methods of aListener. It is the
- // responsibility of the caller to remove aListener before it is destroyed.
void AddMainThreadListener(MainThreadMediaStreamListener* aListener)
{
NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
mMainThreadListeners.AppendElement(aListener);
}
- // It's safe to call this even if aListener is not currently a listener;
- // the call will be ignored.
void RemoveMainThreadListener(MainThreadMediaStreamListener* aListener)
{
NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
@@ -348,7 +339,6 @@ public:
virtual SourceMediaStream* AsSourceStream() { return nullptr; }
virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; }
- virtual AudioNodeStream* AsAudioNodeStream() { return nullptr; }
// media graph thread only
void Init();
@@ -374,7 +364,7 @@ public:
{
mVideoOutputs.RemoveElement(aContainer);
}
- void ChangeExplicitBlockerCountImpl(GraphTime aTime, int32_t aDelta)
+ void ChangeExplicitBlockerCountImpl(StreamTime aTime, int32_t aDelta)
{
mExplicitBlockerCount.SetAtAndAfter(aTime, mExplicitBlockerCount.GetAt(aTime) + aDelta);
}
@@ -392,24 +382,7 @@ public:
}
const StreamBuffer& GetStreamBuffer() { return mBuffer; }
GraphTime GetStreamBufferStartTime() { return mBufferStartTime; }
- /**
- * Convert graph time to stream time. aTime must be <= mStateComputedTime
- * to ensure we know exactly how much time this stream will be blocked during
- * the interval.
- */
StreamTime GraphTimeToStreamTime(GraphTime aTime);
- /**
- * Convert graph time to stream time. aTime can be > mStateComputedTime,
- * in which case we optimistically assume the stream will not be blocked
- * after mStateComputedTime.
- */
- StreamTime GraphTimeToStreamTimeOptimistic(GraphTime aTime);
- /**
- * Convert stream time to graph time. The result can be > mStateComputedTime,
- * in which case we did the conversion optimistically assuming the stream
- * will not be blocked after mStateComputedTime.
- */
- GraphTime StreamTimeToGraphTime(StreamTime aTime);
bool IsFinishedOnGraphThread() { return mFinished; }
void FinishOnGraphThread();
@@ -451,7 +424,7 @@ protected:
// API, minus the number of times it has been explicitly unblocked.
TimeVarying mExplicitBlockerCount;
nsTArray<nsRefPtr<MediaStreamListener> > mListeners;
- nsTArray<MainThreadMediaStreamListener*> mMainThreadListeners;
+ nsTArray<nsRefPtr<MainThreadMediaStreamListener> > mMainThreadListeners;
// Precomputed blocking status (over GraphTime).
// This is only valid between the graph's mCurrentTime and
@@ -770,8 +743,7 @@ public:
* Allocates a new input port attached to source aStream.
* This stream can be removed by calling MediaInputPort::Remove().
*/
- already_AddRefed<MediaInputPort> AllocateInputPort(MediaStream* aStream,
- uint32_t aFlags = 0);
+ MediaInputPort* AllocateInputPort(MediaStream* aStream, uint32_t aFlags = 0);
/**
* Force this stream into the finished state.
*/
@@ -823,20 +795,12 @@ protected:
bool mInCycle;
};
-// Returns ideal audio rate for processing
-inline TrackRate IdealAudioRate() { return 48000; }
-
/**
* Initially, at least, we will have a singleton MediaStreamGraph per
* process.
*/
class MediaStreamGraph {
public:
- // We ensure that the graph current time advances in multiples of
- // IdealAudioBlockSize()/IdealAudioRate(). A stream that never blocks
- // and has a track with the ideal audio rate will produce audio in
- // multiples of the block size.
-
// Main thread only
static MediaStreamGraph* GetInstance();
// Control API.
@@ -860,11 +824,6 @@ public:
* particular tracks of each input stream.
*/
ProcessedMediaStream* CreateTrackUnionStream(nsDOMMediaStream* aWrapper);
- /**
- * Create a stream that will process audio for an AudioNode.
- * Takes ownership of aEngine.
- */
- AudioNodeStream* CreateAudioNodeStream(AudioNodeEngine* aEngine);
/**
* Returns the number of graph updates sent. This can be used to track
* whether a given update has been processed by the graph thread and reflected
@@ -878,9 +837,9 @@ public:
* main-thread stream state has been next updated.
* Should only be called during MediaStreamListener callbacks.
*/
- void DispatchToMainThreadAfterStreamStateUpdate(already_AddRefed<nsIRunnable> aRunnable)
+ void DispatchToMainThreadAfterStreamStateUpdate(nsIRunnable* aRunnable)
{
- *mPendingUpdateRunnables.AppendElement() = aRunnable;
+ mPendingUpdateRunnables.AppendElement(aRunnable);
}
protected:
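GraphTimeToStreamTime (kept in the header above) and the removed optimistic variant differ only in what they assume beyond mStateComputedTime: stream time is graph time minus time spent blocked, and the optimistic version assumes no blocking after the last point for which blocking has been computed. A simplified standalone model, with a single hard-coded blocked interval standing in for the stream's blocking history:

#include <algorithm>
#include <cstdint>
#include <cstdio>

typedef int64_t GraphTime;
typedef int64_t StreamTime;

// A single blocked interval [start, end) stands in for mBlocked's history.
const GraphTime kBlockedStart = 100;
const GraphTime kBlockedEnd = 150;
const GraphTime kStateComputedTime = 200; // blocking is known up to here

StreamTime GraphTimeToStreamTime(GraphTime aTime)
{
  // Subtract the blocked portion of [0, aTime); valid for
  // aTime <= kStateComputedTime, where blocking is fully known.
  GraphTime blocked = std::max<GraphTime>(
      0, std::min(aTime, kBlockedEnd) - std::min(aTime, kBlockedStart));
  return aTime - blocked;
}

StreamTime GraphTimeToStreamTimeOptimistic(GraphTime aTime)
{
  // Beyond the computed time, optimistically assume no further blocking.
  GraphTime known = std::min(aTime, kStateComputedTime);
  return GraphTimeToStreamTime(known) + (aTime - known);
}

int main()
{
  printf("%lld\n", (long long)GraphTimeToStreamTime(180));           // 130
  printf("%lld\n", (long long)GraphTimeToStreamTimeOptimistic(250)); // 200
  return 0;
}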
diff --git a/content/media/MediaStreamGraphImpl.h b/content/media/MediaStreamGraphImpl.h
deleted file mode 100644
index cc0ed6441e46..000000000000
--- a/content/media/MediaStreamGraphImpl.h
+++ /dev/null
@@ -1,517 +0,0 @@
-/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#ifndef MOZILLA_MEDIASTREAMGRAPHIMPL_H_
-#define MOZILLA_MEDIASTREAMGRAPHIMPL_H_
-
-#include "MediaStreamGraph.h"
-
-#include "mozilla/Monitor.h"
-#include "mozilla/TimeStamp.h"
-#include "nsIThread.h"
-#include "nsIRunnable.h"
-
-namespace mozilla {
-
-#ifdef PR_LOGGING
-extern PRLogModuleInfo* gMediaStreamGraphLog;
-#define LOG(type, msg) PR_LOG(gMediaStreamGraphLog, type, msg)
-#else
-#define LOG(type, msg)
-#endif
-
-/**
- * Assume we can run an iteration of the MediaStreamGraph loop in this much time
- * or less.
- * We try to run the control loop at this rate.
- */
-static const int MEDIA_GRAPH_TARGET_PERIOD_MS = 10;
-
-/**
- * Assume that we might miss our scheduled wakeup of the MediaStreamGraph by
- * this much.
- */
-static const int SCHEDULE_SAFETY_MARGIN_MS = 10;
-
-/**
- * Try to have this much audio buffered in streams and queued to the hardware.
- * The maximum delay to the end of the next control loop
- * is 2*MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS.
- * There is no point in buffering more audio than this in a stream at any
- * given time (until we add processing).
- * This is not optimal yet.
- */
-static const int AUDIO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
- SCHEDULE_SAFETY_MARGIN_MS;
-
-/**
- * Try to have this much video buffered. Video frames are set
- * near the end of the iteration of the control loop. The maximum delay
- * to the setting of the next video frame is 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
- * SCHEDULE_SAFETY_MARGIN_MS. This is not optimal yet.
- */
-static const int VIDEO_TARGET_MS = 2*MEDIA_GRAPH_TARGET_PERIOD_MS +
- SCHEDULE_SAFETY_MARGIN_MS;
-
-/**
- * A per-stream update message passed from the media graph thread to the
- * main thread.
- */
-struct StreamUpdate {
- int64_t mGraphUpdateIndex;
- nsRefPtr<MediaStream> mStream;
- StreamTime mNextMainThreadCurrentTime;
- bool mNextMainThreadFinished;
-};
-
-/**
- * This represents a message passed from the main thread to the graph thread.
- * A ControlMessage always references a particular affected stream.
- */
-class ControlMessage {
-public:
- ControlMessage(MediaStream* aStream) : mStream(aStream)
- {
- MOZ_COUNT_CTOR(ControlMessage);
- }
- // All these run on the graph thread
- virtual ~ControlMessage()
- {
- MOZ_COUNT_DTOR(ControlMessage);
- }
- // Do the action of this message on the MediaStreamGraph thread. Any actions
- // affecting graph processing should take effect at mStateComputedTime.
- // All stream data for times < mStateComputedTime has already been
- // computed.
- virtual void Run() = 0;
- // When we're shutting down the application, most messages are ignored but
- // some cleanup messages should still be processed (on the main thread).
- virtual void RunDuringShutdown() {}
- MediaStream* GetStream() { return mStream; }
-
-protected:
- // We do not hold a reference to mStream. The graph will be holding
- // a reference to the stream until the Destroy message is processed. The
- // last message referencing a stream is the Destroy message for that stream.
- MediaStream* mStream;
-};
-
-/**
- * The implementation of a media stream graph. This class is private to this
- * file. It's not in the anonymous namespace because MediaStream needs to
- * be able to friend it.
- *
- * Currently we only have one per process.
- */
-class MediaStreamGraphImpl : public MediaStreamGraph {
-public:
- MediaStreamGraphImpl();
- ~MediaStreamGraphImpl()
- {
- NS_ASSERTION(IsEmpty(),
- "All streams should have been destroyed by messages from the main thread");
- LOG(PR_LOG_DEBUG, ("MediaStreamGraph %p destroyed", this));
- }
-
- // Main thread only.
- /**
- * This runs every time we need to sync state from the media graph thread
- * to the main thread while the main thread is not in the middle
- * of a script. It runs during a "stable state" (per HTML5) or during
- * an event posted to the main thread.
- */
- void RunInStableState();
- /**
- * Ensure a runnable to run RunInStableState is posted to the appshell to
- * run at the next stable state (per HTML5).
- * See EnsureStableStateEventPosted.
- */
- void EnsureRunInStableState();
- /**
- * Called to apply a StreamUpdate to its stream.
- */
- void ApplyStreamUpdate(StreamUpdate* aUpdate);
- /**
- * Append a ControlMessage to the message queue. This queue is drained
- * during RunInStableState; the messages will run on the graph thread.
- */
- void AppendMessage(ControlMessage* aMessage);
- /**
- * Make this MediaStreamGraph enter forced-shutdown state. This state
- * will be noticed by the media graph thread, which will shut down all streams
- * and other state controlled by the media graph thread.
- * This is called during application shutdown.
- */
- void ForceShutDown();
- /**
- * Shutdown() this MediaStreamGraph's threads and return when they've shut down.
- */
- void ShutdownThreads();
-
- // The following methods run on the graph thread (or possibly the main thread if
- // mLifecycleState > LIFECYCLE_RUNNING)
- /**
- * Runs main control loop on the graph thread. Normally a single invocation
- * of this runs for the entire lifetime of the graph thread.
- */
- void RunThread();
- /**
- * Call this to indicate that another iteration of the control loop is
- * required on its regular schedule. The monitor must not be held.
- */
- void EnsureNextIteration();
- /**
- * As above, but with the monitor already held.
- */
- void EnsureNextIterationLocked(MonitorAutoLock& aLock);
- /**
- * Call this to indicate that another iteration of the control loop is
- * required immediately. The monitor must already be held.
- */
- void EnsureImmediateWakeUpLocked(MonitorAutoLock& aLock);
- /**
- * Ensure there is an event posted to the main thread to run RunInStableState.
- * mMonitor must be held.
- * See EnsureRunInStableState
- */
- void EnsureStableStateEventPosted();
- /**
- * Generate messages to the main thread to update it for all state changes.
- * mMonitor must be held.
- */
- void PrepareUpdatesToMainThreadState();
- // The following methods are the various stages of RunThread processing.
- /**
- * Compute a new current time for the graph and advance all on-graph-thread
- * state to the new current time.
- */
- void UpdateCurrentTime();
- /**
- * Update the consumption state of aStream to reflect whether its data
- * is needed or not.
- */
- void UpdateConsumptionState(SourceMediaStream* aStream);
- /**
- * Extract any state updates pending in aStream, and apply them.
- */
- void ExtractPendingInput(SourceMediaStream* aStream,
- GraphTime aDesiredUpToTime,
- bool* aEnsureNextIteration);
- /**
- * Update "have enough data" flags in aStream.
- */
- void UpdateBufferSufficiencyState(SourceMediaStream* aStream);
- /*
- * If aStream hasn't already been ordered, push it onto aStack and order
- * its children.
- */
- void UpdateStreamOrderForStream(nsTArray<MediaStream*>* aStack,
- already_AddRefed<MediaStream> aStream);
- /**
- * Mark aStream and all its inputs (recursively) as consumed.
- */
- static void MarkConsumed(MediaStream* aStream);
- /**
- * Sort mStreams so that every stream not in a cycle is after any streams
- * it depends on, and every stream in a cycle is marked as being in a cycle.
- * Also sets mIsConsumed on every stream.
- */
- void UpdateStreamOrder();
- /**
- * Compute the blocking states of streams from mStateComputedTime
- * until the desired future time aEndBlockingDecisions.
- * Updates mStateComputedTime and sets MediaStream::mBlocked
- * for all streams.
- */
- void RecomputeBlocking(GraphTime aEndBlockingDecisions);
- // The following methods are used to help RecomputeBlocking.
- /**
- * If aStream isn't already in aStreams, add it and recursively call
- * AddBlockingRelatedStreamsToSet on all the streams whose blocking
- * status could depend on or affect the state of aStream.
- */
- void AddBlockingRelatedStreamsToSet(nsTArray<MediaStream*>* aStreams,
- MediaStream* aStream);
- /**
- * Mark a stream blocked at time aTime. If this results in decisions that need
- * to be revisited at some point in the future, *aEnd will be reduced to the
- * first time in the future to recompute those decisions.
- */
- void MarkStreamBlocking(MediaStream* aStream);
- /**
- * Recompute blocking for the streams in aStreams for the interval starting at aTime.
- * If this results in decisions that need to be revisited at some point
- * in the future, *aEnd will be reduced to the first time in the future to
- * recompute those decisions.
- */
- void RecomputeBlockingAt(const nsTArray<MediaStream*>& aStreams,
- GraphTime aTime, GraphTime aEndBlockingDecisions,
- GraphTime* aEnd);
- /**
- * Produce data for all streams >= aStreamIndex for the given time interval.
- * Advances block by block, each iteration producing data for all streams
- * for a single block.
- * This is needed if there are WebAudio delay nodes, whose output for a block
- * may depend on the output of any other node (including itself) for the
- * previous block. This is probably also more performant due to better memory
- * locality.
- * This is called whenever we have an AudioNodeStream in the graph.
- */
- void ProduceDataForStreamsBlockByBlock(uint32_t aStreamIndex,
- GraphTime aFrom,
- GraphTime aTo);
- /**
- * Returns true if aStream will underrun at aTime for its own playback.
- * aEndBlockingDecisions is when we plan to stop making blocking decisions.
- * *aEnd will be reduced to the first time in the future to recompute these
- * decisions.
- */
- bool WillUnderrun(MediaStream* aStream, GraphTime aTime,
- GraphTime aEndBlockingDecisions, GraphTime* aEnd);
- /**
- * Given a graph time aTime, convert it to a stream time taking into
- * account the time during which aStream is scheduled to be blocked.
- */
- StreamTime GraphTimeToStreamTime(MediaStream* aStream, GraphTime aTime);
- /**
- * Given a graph time aTime, convert it to a stream time taking into
- * account the time during which aStream is scheduled to be blocked, and
- * when we don't know whether it's blocked or not, we assume it's not blocked.
- */
- StreamTime GraphTimeToStreamTimeOptimistic(MediaStream* aStream, GraphTime aTime);
- enum {
- INCLUDE_TRAILING_BLOCKED_INTERVAL = 0x01
- };
- /**
- * Given a stream time aTime, convert it to a graph time taking into
- * account the time during which aStream is scheduled to be blocked.
- * aTime must be <= mStateComputedTime since blocking decisions
- * are only known up to that point.
- * If aTime is exactly at the start of a blocked interval, then the blocked
- * interval is included in the time returned if and only if
- * aFlags includes INCLUDE_TRAILING_BLOCKED_INTERVAL.
- */
- GraphTime StreamTimeToGraphTime(MediaStream* aStream, StreamTime aTime,
- uint32_t aFlags = 0);
- /**
- * Get the current audio position of the stream's audio output.
- */
- GraphTime GetAudioPosition(MediaStream* aStream);
- /**
- * Call NotifyHaveCurrentData on aStream's listeners.
- */
- void NotifyHasCurrentData(MediaStream* aStream);
- /**
- * If aStream needs an audio stream but doesn't have one, create it.
- * If aStream doesn't need an audio stream but has one, destroy it.
- */
- void CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
- MediaStream* aStream);
- /**
- * Queue audio (mix of stream audio and silence for blocked intervals)
- * to the audio output stream.
- */
- void PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
- /**
- * Set the correct current video frame for stream aStream.
- */
- void PlayVideo(MediaStream* aStream);
- /**
- * No more data will be forthcoming for aStream. The stream will end
- * at the current buffer end point. The StreamBuffer's tracks must be
- * explicitly set to finished by the caller.
- */
- void FinishStream(MediaStream* aStream);
- /**
- * Compute how much stream data we would like to buffer for aStream.
- */
- StreamTime GetDesiredBufferEnd(MediaStream* aStream);
- /**
- * Returns true when there are no active streams.
- */
- bool IsEmpty() { return mStreams.IsEmpty() && mPortCount == 0; }
-
- // For use by control messages
- /**
- * Identify which graph update index we are currently processing.
- */
- int64_t GetProcessingGraphUpdateIndex() { return mProcessingGraphUpdateIndex; }
- /**
- * Add aStream to the graph and initialize its graph-specific state.
- */
- void AddStream(MediaStream* aStream);
- /**
- * Remove aStream from the graph. Ensures that pending messages about the
- * stream back to the main thread are flushed.
- */
- void RemoveStream(MediaStream* aStream);
- /**
- * Remove aPort from the graph and release it.
- */
- void DestroyPort(MediaInputPort* aPort);
-
- // Data members
-
- /**
- * Media graph thread.
- * Readonly after initialization on the main thread.
- */
- nsCOMPtr<nsIThread> mThread;
-
- // The following state is managed on the graph thread only, unless
- // mLifecycleState > LIFECYCLE_RUNNING in which case the graph thread
- // is not running and this state can be used from the main thread.
-
- nsTArray<nsRefPtr<MediaStream> > mStreams;
- /**
- * The current graph time for the current iteration of the RunThread control
- * loop.
- */
- GraphTime mCurrentTime;
- /**
- * Blocking decisions and all stream contents have been computed up to this
- * time. The next batch of updates from the main thread will be processed
- * at this time. Always >= mCurrentTime.
- */
- GraphTime mStateComputedTime;
- /**
- * This is only used for logging.
- */
- TimeStamp mInitialTimeStamp;
- /**
- * The real timestamp of the latest run of UpdateCurrentTime.
- */
- TimeStamp mCurrentTimeStamp;
- /**
- * Which update batch we are currently processing.
- */
- int64_t mProcessingGraphUpdateIndex;
- /**
- * Number of active MediaInputPorts
- */
- int32_t mPortCount;
-
- // mMonitor guards the data below.
- // MediaStreamGraph normally does its work without holding mMonitor, so it is
- // not safe to just grab mMonitor from some thread and start monkeying with
- // the graph. Instead, communicate with the graph thread using provided
- // mechanisms such as the ControlMessage queue.
- Monitor mMonitor;
-
- // Data guarded by mMonitor (must always be accessed with mMonitor held,
- // regardless of the value of mLifecycleState).
-
- /**
- * State to copy to main thread
- */
- nsTArray<StreamUpdate> mStreamUpdates;
- /**
- * Runnables to run after the next update to main thread state.
- */
- nsTArray<nsCOMPtr<nsIRunnable> > mUpdateRunnables;
- struct MessageBlock {
- int64_t mGraphUpdateIndex;
- nsTArray<nsAutoPtr<ControlMessage> > mMessages;
- };
- /**
- * A list of batches of messages to process. Each batch is processed
- * as an atomic unit.
- */
- nsTArray<MessageBlock> mMessageQueue;
- /**
- * This enum specifies where this graph is in its lifecycle. This is used
- * to control shutdown.
- * Shutdown is tricky because it can happen in two different ways:
- * 1) Shutdown due to inactivity. RunThread() detects that it has no
- * pending messages and no streams, and exits. The next RunInStableState()
- * checks if there are new pending messages from the main thread (true only
- * if new stream creation raced with shutdown); if there are, it revives
- * RunThread(), otherwise it commits to shutting down the graph. New stream
- * creation after this point will create a new graph. An async event is
- * dispatched to Shutdown() the graph's threads and then delete the graph
- * object.
- * 2) Forced shutdown at application shutdown. A flag is set, RunThread()
- * detects the flag and exits, the next RunInStableState() detects the flag,
- * and dispatches the async event to Shutdown() the graph's threads. However
- * the graph object is not deleted. New messages for the graph are processed
- * synchronously on the main thread if necessary. When the last stream is
- * destroyed, the graph object is deleted.
- */
- enum LifecycleState {
- // The graph thread hasn't started yet.
- LIFECYCLE_THREAD_NOT_STARTED,
- // RunThread() is running normally.
- LIFECYCLE_RUNNING,
- // In the following states, the graph thread is not running so
- // all "graph thread only" state in this class can be used safely
- // on the main thread.
- // RunThread() has exited and we're waiting for the next
- // RunInStableState(), at which point we can clean up the main-thread
- // side of the graph.
- LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP,
- // RunInStableState() posted a ShutdownRunnable, and we're waiting for it
- // to shut down the graph thread(s).
- LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN,
- // Graph threads have shut down but we're waiting for remaining streams
- // to be destroyed. Only happens during application shutdown since normally
- // we'd only shut down a graph when it has no streams.
- LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION
- };
- LifecycleState mLifecycleState;
- /**
- * This enum specifies the wait state of the graph thread.
- */
- enum WaitState {
- // RunThread() is running normally
- WAITSTATE_RUNNING,
- // RunThread() is paused waiting for its next iteration, which will
- // happen soon
- WAITSTATE_WAITING_FOR_NEXT_ITERATION,
- // RunThread() is paused indefinitely waiting for something to change
- WAITSTATE_WAITING_INDEFINITELY,
- // Something has signaled RunThread() to wake up immediately,
- // but it hasn't done so yet
- WAITSTATE_WAKING_UP
- };
- WaitState mWaitState;
- /**
- * True when another iteration of the control loop is required.
- */
- bool mNeedAnotherIteration;
- /**
- * True when we need to do a forced shutdown during application shutdown.
- */
- bool mForceShutDown;
- /**
- * True when we have posted an event to the main thread to run
- * RunInStableState() and the event hasn't run yet.
- */
- bool mPostedRunInStableStateEvent;
-
- // Main thread only
-
- /**
- * Messages posted by the current event loop task. These are forwarded to
- * the media graph thread during RunInStableState. We can't forward them
- * immediately because we want all messages between stable states to be
- * processed as an atomic batch.
- */
- nsTArray<nsAutoPtr<ControlMessage> > mCurrentTaskMessageQueue;
- /**
- * True when RunInStableState has determined that mLifecycleState is >
- * LIFECYCLE_RUNNING. Since only the main thread can reset mLifecycleState to
- * LIFECYCLE_RUNNING, this can be relied on to not change unexpectedly.
- */
- bool mDetectedNotRunning;
- /**
- * True when a stable state runner has been posted to the appshell to run
- * RunInStableState at the next stable state.
- */
- bool mPostedRunInStableState;
-};
-
-}
-
-#endif /* MEDIASTREAMGRAPHIMPL_H_ */
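
The ControlMessage machinery deleted above follows a classic command-queue pattern: the main thread batches messages per stable state, and the graph thread runs each batch atomically. A simplified sketch of a message (the subclass and the setter it calls are hypothetical, not from the tree):

```cpp
// Hypothetical ControlMessage subclass illustrating the pattern above.
class SetVolumeMessage : public ControlMessage {
public:
  SetVolumeMessage(MediaStream* aStream, float aVolume)
    : ControlMessage(aStream), mVolume(aVolume) {}
  // Runs on the graph thread; per the contract above, its effects apply
  // at mStateComputedTime, since earlier stream data is already computed.
  virtual void Run()
  {
    GetStream()->SetVolumeImpl(mVolume); // hypothetical graph-thread setter
  }
private:
  float mVolume;
};

// Main-thread side (cf. AppendMessage/RunInStableState above):
//   graph->AppendMessage(new SetVolumeMessage(stream, 0.5f));
```
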
diff --git a/content/media/StreamBuffer.h b/content/media/StreamBuffer.h
index a5e8c86a60c3..81c1abd8b00f 100644
--- a/content/media/StreamBuffer.h
+++ b/content/media/StreamBuffer.h
@@ -35,18 +35,18 @@ const TrackRate TRACK_RATE_MAX = 1 << MEDIA_TIME_FRAC_BITS;
typedef int32_t TrackID;
const TrackID TRACK_NONE = 0;
-inline TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
+inline TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aMicroseconds)
{
NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
- NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
- return (aTime*aRate + (1 << MEDIA_TIME_FRAC_BITS) - 1) >> MEDIA_TIME_FRAC_BITS;
+ NS_ASSERTION(0 <= aMicroseconds && aMicroseconds <= STREAM_TIME_MAX, "Bad microseconds");
+ return (aMicroseconds*aRate + (1 << MEDIA_TIME_FRAC_BITS) - 1) >> MEDIA_TIME_FRAC_BITS;
}
-inline TrackTicks TimeToTicksRoundDown(TrackRate aRate, StreamTime aTime)
+inline TrackTicks TimeToTicksRoundDown(TrackRate aRate, StreamTime aMicroseconds)
{
NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
- NS_ASSERTION(0 <= aTime && aTime <= STREAM_TIME_MAX, "Bad time");
- return (aTime*aRate) >> MEDIA_TIME_FRAC_BITS;
+ NS_ASSERTION(0 <= aMicroseconds && aMicroseconds <= STREAM_TIME_MAX, "Bad microseconds");
+ return (aMicroseconds*aRate) >> MEDIA_TIME_FRAC_BITS;
}
inline StreamTime TicksToTimeRoundUp(TrackRate aRate, TrackTicks aTicks)
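
The conversions above treat StreamTime as fixed-point seconds with MEDIA_TIME_FRAC_BITS fractional bits (20 in the tree at the time, making one unit roughly a microsecond, hence the parameter rename). A worked example under that assumption:

```cpp
#include <cassert>
#include <cstdint>

// Worked example of TimeToTicksRoundDown/RoundUp, assuming
// MEDIA_TIME_FRAC_BITS == 20 (its value in the tree at the time).
int main()
{
  const int kFracBits = 20;
  int64_t halfSecond = (int64_t(1) << kFracBits) / 2; // 0.5 s of media time
  int64_t rate = 44100;                               // ticks per second
  int64_t down = (halfSecond * rate) >> kFracBits;
  int64_t up =
    (halfSecond * rate + (int64_t(1) << kFracBits) - 1) >> kFracBits;
  // 0.5 s at 44100 Hz divides exactly, so both roundings agree.
  assert(down == 22050 && up == 22050);
  return 0;
}
```
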
diff --git a/content/media/webaudio/AudioBuffer.cpp b/content/media/webaudio/AudioBuffer.cpp
index 83cbbbc95432..d30e14ef9f7f 100644
--- a/content/media/webaudio/AudioBuffer.cpp
+++ b/content/media/webaudio/AudioBuffer.cpp
@@ -10,14 +10,13 @@
#include "AudioContext.h"
#include "jsfriendapi.h"
#include "mozilla/ErrorResult.h"
-#include "AudioSegment.h"
namespace mozilla {
namespace dom {
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(AudioBuffer)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mContext)
- NS_IMPL_CYCLE_COLLECTION_UNLINK(mJSChannels)
+ NS_IMPL_CYCLE_COLLECTION_UNLINK(mChannels)
NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
tmp->ClearJSChannels();
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
@@ -29,8 +28,8 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(AudioBuffer)
NS_IMPL_CYCLE_COLLECTION_TRACE_PRESERVED_WRAPPER
- for (uint32_t i = 0; i < tmp->mJSChannels.Length(); ++i) {
- NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mJSChannels[i])
+ for (uint32_t i = 0; i < tmp->mChannels.Length(); ++i) {
+ NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(mChannels[i])
}
NS_IMPL_CYCLE_COLLECTION_TRACE_END
@@ -42,7 +41,7 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(AudioBuffer)
NS_INTERFACE_MAP_END
AudioBuffer::AudioBuffer(AudioContext* aContext, uint32_t aLength,
- float aSampleRate)
+ uint32_t aSampleRate)
: mContext(aContext),
mLength(aLength),
mSampleRate(aSampleRate)
@@ -60,14 +59,14 @@ AudioBuffer::~AudioBuffer()
void
AudioBuffer::ClearJSChannels()
{
- mJSChannels.Clear();
+ mChannels.Clear();
NS_DROP_JS_OBJECTS(this, AudioBuffer);
}
bool
AudioBuffer::InitializeBuffers(uint32_t aNumberOfChannels, JSContext* aJSContext)
{
- if (!mJSChannels.SetCapacity(aNumberOfChannels)) {
+ if (!mChannels.SetCapacity(aNumberOfChannels)) {
return false;
}
for (uint32_t i = 0; i < aNumberOfChannels; ++i) {
@@ -75,7 +74,7 @@ AudioBuffer::InitializeBuffers(uint32_t aNumberOfChannels, JSContext* aJSContext
if (!array) {
return false;
}
- mJSChannels.AppendElement(array);
+ mChannels.AppendElement(array);
}
return true;
@@ -88,37 +87,15 @@ AudioBuffer::WrapObject(JSContext* aCx, JSObject* aScope,
return AudioBufferBinding::Wrap(aCx, aScope, this, aTriedToWrap);
}
-void
-AudioBuffer::RestoreJSChannelData(JSContext* aJSContext)
-{
- if (mSharedChannels) {
- for (uint32_t i = 0; i < mJSChannels.Length(); ++i) {
- const float* data = mSharedChannels->GetData(i);
- // The following code first zeroes the array and then copies our data
- // into it. We could avoid this with additional JS APIs to construct
- // an array (or ArrayBuffer) containing initial data.
- JSObject* array = JS_NewFloat32Array(aJSContext, mLength);
- memcpy(JS_GetFloat32ArrayData(array), data, sizeof(float)*mLength);
- mJSChannels[i] = array;
- }
-
- mSharedChannels = nullptr;
- mResampledChannels = nullptr;
- }
-}
-
JSObject*
AudioBuffer::GetChannelData(JSContext* aJSContext, uint32_t aChannel,
- ErrorResult& aRv)
+ ErrorResult& aRv) const
{
- if (aChannel >= NumberOfChannels()) {
+ if (aChannel >= mChannels.Length()) {
aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
return nullptr;
}
-
- RestoreJSChannelData(aJSContext);
-
- return mJSChannels[aChannel];
+ return GetChannelData(aChannel);
}
void
@@ -126,83 +103,13 @@ AudioBuffer::SetChannelDataFromArrayBufferContents(JSContext* aJSContext,
uint32_t aChannel,
void* aContents)
{
- RestoreJSChannelData(aJSContext);
-
- MOZ_ASSERT(aChannel < NumberOfChannels());
+ MOZ_ASSERT(aChannel < mChannels.Length());
JSObject* arrayBuffer = JS_NewArrayBufferWithContents(aJSContext, aContents);
- mJSChannels[aChannel] = JS_NewFloat32ArrayWithBuffer(aJSContext, arrayBuffer,
- 0, -1);
- MOZ_ASSERT(mLength == JS_GetTypedArrayLength(mJSChannels[aChannel]));
+ mChannels[aChannel] = JS_NewFloat32ArrayWithBuffer(aJSContext, arrayBuffer,
+ 0, -1);
+ MOZ_ASSERT(mLength == JS_GetTypedArrayLength(mChannels[aChannel]));
}
-static already_AddRefed<ThreadSharedFloatArrayBufferList>
-StealJSArrayDataIntoThreadSharedFloatArrayBufferList(JSContext* aJSContext,
- const nsTArray<JSObject*>& aJSArrays)
-{
- nsRefPtr<ThreadSharedFloatArrayBufferList> result =
- new ThreadSharedFloatArrayBufferList(aJSArrays.Length());
- for (uint32_t i = 0; i < aJSArrays.Length(); ++i) {
- JSObject* arrayBuffer = JS_GetArrayBufferViewBuffer(aJSArrays[i]);
- void* dataToFree = nullptr;
- uint8_t* stolenData = nullptr;
- if (arrayBuffer &&
- JS_StealArrayBufferContents(aJSContext, arrayBuffer, &dataToFree,
- &stolenData)) {
- result->SetData(i, dataToFree, reinterpret_cast<float*>(stolenData));
- } else {
- result->Clear();
- return result.forget();
- }
- }
- return result.forget();
-}
-
-ThreadSharedFloatArrayBufferList*
-AudioBuffer::GetThreadSharedChannelsForRate(JSContext* aJSContext, uint32_t aRate,
- uint32_t* aLength)
-{
- if (mResampledChannels && mResampledChannelsRate == aRate) {
- // return cached data
- *aLength = mResampledChannelsLength;
- return mResampledChannels;
- }
-
- if (!mSharedChannels) {
- // Steal JS data
- mSharedChannels =
- StealJSArrayDataIntoThreadSharedFloatArrayBufferList(aJSContext, mJSChannels);
- }
-
- if (mSampleRate == aRate) {
- *aLength = mLength;
- return mSharedChannels;
- }
-
- mResampledChannels = new ThreadSharedFloatArrayBufferList(NumberOfChannels());
-
- double newLengthD = ceil(Duration()*aRate);
- uint32_t newLength = uint32_t(newLengthD);
- *aLength = newLength;
- double size = sizeof(float)*NumberOfChannels()*newLengthD;
- if (size != uint32_t(size)) {
- return mResampledChannels;
- }
- float* outputData = static_cast<float*>(malloc(uint32_t(size)));
- if (!outputData) {
- return mResampledChannels;
- }
-
- for (uint32_t i = 0; i < NumberOfChannels(); ++i) {
- NS_ERROR("Resampling not supported yet");
- // const float* inputData = mSharedChannels->GetData(i);
- // Resample(inputData, mLength, mSampleRate, outputData, newLength, aRate);
- mResampledChannels->SetData(i, i == 0 ? outputData : nullptr, outputData);
- outputData += newLength;
- }
- mResampledChannelsRate = aRate;
- mResampledChannelsLength = newLength;
- return mResampledChannels;
-}
-
}
}
+
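
For context on what was just deleted: AudioBuffer kept channel data either in JS Float32Arrays or, after neutering, in a thread-shared list, and the two removed helpers moved data across that boundary. A condensed sketch of that handoff, reassembled from the removed code itself (cx, jsChannel, shared, i, and length are illustrative locals):

```cpp
// Condensed from the removed code above; the JSAPI calls are the
// 2013-era ones used in the patch itself.

// Main thread -> shared: neuter the Float32Array's buffer and move its
// contents into the ThreadSharedFloatArrayBufferList.
JSObject* arrayBuffer = JS_GetArrayBufferViewBuffer(jsChannel);
void* dataToFree = nullptr;
uint8_t* stolenData = nullptr;
if (arrayBuffer &&
    JS_StealArrayBufferContents(cx, arrayBuffer, &dataToFree, &stolenData)) {
  shared->SetData(i, dataToFree, reinterpret_cast<float*>(stolenData));
}

// Shared -> JS (RestoreJSChannelData): copy back into a fresh
// Float32Array before script next calls getChannelData().
JSObject* array = JS_NewFloat32Array(cx, length);
memcpy(JS_GetFloat32ArrayData(array), shared->GetData(i),
       sizeof(float) * length);
```
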
diff --git a/content/media/webaudio/AudioBuffer.h b/content/media/webaudio/AudioBuffer.h
index 3a7293947e93..0e39c0da2b1c 100644
--- a/content/media/webaudio/AudioBuffer.h
+++ b/content/media/webaudio/AudioBuffer.h
@@ -14,8 +14,6 @@
#include "nsAutoPtr.h"
#include "nsTArray.h"
#include "AudioContext.h"
-#include "AudioSegment.h"
-#include "AudioNodeEngine.h"
struct JSContext;
class JSObject;
@@ -26,18 +24,13 @@ class ErrorResult;
namespace dom {
-/**
- * An AudioBuffer keeps its data either in the mJSChannels objects, which
- * are Float32Arrays, or in mSharedChannels if the mJSChannels objects have
- * been neutered.
- */
class AudioBuffer MOZ_FINAL : public nsISupports,
public nsWrapperCache,
public EnableWebAudioCheck
{
public:
AudioBuffer(AudioContext* aContext, uint32_t aLength,
- float aSampleRate);
+ uint32_t aSampleRate);
~AudioBuffer();
// This function needs to be called in order to allocate
@@ -61,7 +54,7 @@ public:
return mSampleRate;
}
- int32_t Length() const
+ uint32_t Length() const
{
return mLength;
}
@@ -73,54 +66,28 @@ public:
uint32_t NumberOfChannels() const
{
- return mJSChannels.Length();
+ return mChannels.Length();
}
- /**
- * If mSharedChannels is non-null, copies its contents to
- * new Float32Arrays in mJSChannels. Returns a Float32Array.
- */
JSObject* GetChannelData(JSContext* aJSContext, uint32_t aChannel,
- ErrorResult& aRv);
-
+ ErrorResult& aRv) const;
JSObject* GetChannelData(uint32_t aChannel) const {
// Doesn't perform bounds checking
- MOZ_ASSERT(aChannel < mJSChannels.Length());
- return mJSChannels[aChannel];
+ MOZ_ASSERT(aChannel < mChannels.Length());
+ return mChannels[aChannel];
}
- /**
- * Returns a ThreadSharedFloatArrayBufferList containing the sample data
- * at aRate. Sets *aLength to the number of samples per channel.
- */
- ThreadSharedFloatArrayBufferList* GetThreadSharedChannelsForRate(JSContext* aContext,
- uint32_t aRate,
- uint32_t* aLength);
-
// aContents should either come from JS_AllocateArrayBufferContents or
// JS_StealArrayBufferContents.
void SetChannelDataFromArrayBufferContents(JSContext* aJSContext,
uint32_t aChannel,
void* aContents);
-protected:
- void RestoreJSChannelData(JSContext* aJSContext);
+private:
void ClearJSChannels();
nsRefPtr<AudioContext> mContext;
- // Float32Arrays
- nsAutoTArray<JSObject*,2> mJSChannels;
-
- // mSharedChannels aggregates the data from mJSChannels. This is non-null
- // if and only if the mJSChannels are neutered.
- nsRefPtr<ThreadSharedFloatArrayBufferList> mSharedChannels;
-
- // One-element cache of resampled data. Can be non-null only if mSharedChannels
- // is non-null.
- nsRefPtr<ThreadSharedFloatArrayBufferList> mResampledChannels;
- uint32_t mResampledChannelsRate;
- uint32_t mResampledChannelsLength;
-
+ FallibleTArray<JSObject*> mChannels;
uint32_t mLength;
float mSampleRate;
};
diff --git a/content/media/webaudio/AudioBufferSourceNode.cpp b/content/media/webaudio/AudioBufferSourceNode.cpp
index 6ee1dbe0551e..004ffe686451 100644
--- a/content/media/webaudio/AudioBufferSourceNode.cpp
+++ b/content/media/webaudio/AudioBufferSourceNode.cpp
@@ -6,9 +6,6 @@
#include "AudioBufferSourceNode.h"
#include "mozilla/dom/AudioBufferSourceNodeBinding.h"
-#include "nsMathUtils.h"
-#include "AudioNodeEngine.h"
-#include "AudioNodeStream.h"
namespace mozilla {
namespace dom {
@@ -21,122 +18,9 @@ NS_INTERFACE_MAP_END_INHERITING(AudioSourceNode)
NS_IMPL_ADDREF_INHERITED(AudioBufferSourceNode, AudioSourceNode)
NS_IMPL_RELEASE_INHERITED(AudioBufferSourceNode, AudioSourceNode)
-class AudioBufferSourceNodeEngine : public AudioNodeEngine
-{
-public:
- AudioBufferSourceNodeEngine() :
- mStart(0), mStop(TRACK_TICKS_MAX), mOffset(0), mDuration(0) {}
-
- // START, OFFSET and DURATION are always set by start() (along with setting
- // mBuffer to something non-null).
- // STOP is set by stop().
- enum Parameters {
- START,
- STOP,
- OFFSET,
- DURATION
- };
- virtual void SetStreamTimeParameter(uint32_t aIndex, TrackTicks aParam)
- {
- switch (aIndex) {
- case START: mStart = aParam; break;
- case STOP: mStop = aParam; break;
- default:
- NS_ERROR("Bad AudioBufferSourceNodeEngine StreamTimeParameter");
- }
- }
- virtual void SetInt32Parameter(uint32_t aIndex, int32_t aParam)
- {
- switch (aIndex) {
- case OFFSET: mOffset = aParam; break;
- case DURATION: mDuration = aParam; break;
- default:
- NS_ERROR("Bad AudioBufferSourceNodeEngine Int32Parameter");
- }
- }
- virtual void SetBuffer(already_AddRefed<ThreadSharedFloatArrayBufferList> aBuffer)
- {
- mBuffer = aBuffer;
- }
-
- virtual void ProduceAudioBlock(AudioNodeStream* aStream,
- const AudioChunk& aInput,
- AudioChunk* aOutput,
- bool* aFinished)
- {
- if (!mBuffer)
- return;
- TrackTicks currentPosition = aStream->GetCurrentPosition();
- if (currentPosition + WEBAUDIO_BLOCK_SIZE <= mStart) {
- aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
- return;
- }
- TrackTicks endTime = std::min(mStart + mDuration, mStop);
- // Don't set *aFinished just because we passed mStop. Maybe someone
- // will call stop() again with a different value.
- if (currentPosition + WEBAUDIO_BLOCK_SIZE >= mStart + mDuration) {
- *aFinished = true;
- }
- if (currentPosition >= endTime || mStart >= endTime) {
- aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
- return;
- }
-
- uint32_t channels = mBuffer->GetChannels();
- if (!channels) {
- aOutput->SetNull(WEBAUDIO_BLOCK_SIZE);
- return;
- }
-
- if (currentPosition >= mStart &&
- currentPosition + WEBAUDIO_BLOCK_SIZE <= endTime) {
- // Data is entirely within the buffer. Avoid copying it.
- aOutput->mDuration = WEBAUDIO_BLOCK_SIZE;
- aOutput->mBuffer = mBuffer;
- aOutput->mChannelData.SetLength(channels);
- for (uint32_t i = 0; i < channels; ++i) {
- aOutput->mChannelData[i] =
- mBuffer->GetData(i) + uintptr_t(currentPosition - mStart + mOffset);
- }
- aOutput->mVolume = 1.0f;
- aOutput->mBufferFormat = AUDIO_FORMAT_FLOAT32;
- return;
- }
-
- AllocateAudioBlock(channels, aOutput);
- TrackTicks start = std::max(currentPosition, mStart);
- TrackTicks end = std::min(currentPosition + WEBAUDIO_BLOCK_SIZE, endTime);
- WriteZeroesToAudioBlock(aOutput, 0, uint32_t(start - currentPosition));
- for (uint32_t i = 0; i < channels; ++i) {
- memcpy(static_cast<float*>(const_cast<void*>(aOutput->mChannelData[i])) +
- uint32_t(start - currentPosition),
- mBuffer->GetData(i) +
- uintptr_t(start - mStart + mOffset),
- uint32_t(end - start));
- }
- uint32_t endOffset = uint32_t(end - currentPosition);
- WriteZeroesToAudioBlock(aOutput, endOffset, WEBAUDIO_BLOCK_SIZE - endOffset);
- }
-
- TrackTicks mStart;
- TrackTicks mStop;
- nsRefPtr<ThreadSharedFloatArrayBufferList> mBuffer;
- int32_t mOffset;
- int32_t mDuration;
-};
-
AudioBufferSourceNode::AudioBufferSourceNode(AudioContext* aContext)
: AudioSourceNode(aContext)
- , mStartCalled(false)
{
- SetProduceOwnOutput(true);
- mStream = aContext->Graph()->CreateAudioNodeStream(new AudioBufferSourceNodeEngine());
- mStream->AddMainThreadListener(this);
-}
-
-AudioBufferSourceNode::~AudioBufferSourceNode()
-{
- DestroyMediaStream();
}
JSObject*
@@ -146,75 +30,11 @@ AudioBufferSourceNode::WrapObject(JSContext* aCx, JSObject* aScope)
}
void
-AudioBufferSourceNode::Start(JSContext* aCx, double aWhen, double aOffset,
- const Optional<double>& aDuration, ErrorResult& aRv)
+AudioBufferSourceNode::SetBuffer(AudioBuffer* aBuffer)
{
- if (mStartCalled) {
- aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
- return;
- }
- mStartCalled = true;
-
- AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
- if (!mBuffer || !ns) {
- // Nothing to play, or we're already dead for some reason
- return;
- }
-
- uint32_t rate = Context()->GetRate();
- uint32_t lengthSamples;
- nsRefPtr<ThreadSharedFloatArrayBufferList> data =
- mBuffer->GetThreadSharedChannelsForRate(aCx, rate, &lengthSamples);
- double length = double(lengthSamples)/rate;
- double offset = std::max(0.0, aOffset);
- double endOffset = aDuration.WasPassed() ?
- std::min(aOffset + aDuration.Value(), length) : length;
- if (offset >= endOffset) {
- return;
- }
-
- ns->SetBuffer(data.forget());
- // Don't set parameter unnecessarily
- if (aWhen > 0.0) {
- ns->SetStreamTimeParameter(AudioBufferSourceNodeEngine::START,
- Context()->DestinationStream(),
- aWhen);
- }
- int32_t offsetTicks = NS_lround(offset*rate);
- // Don't set parameter unnecessarily
- if (offsetTicks > 0) {
- ns->SetInt32Parameter(AudioBufferSourceNodeEngine::OFFSET, offsetTicks);
- }
- ns->SetInt32Parameter(AudioBufferSourceNodeEngine::DURATION,
- NS_lround(endOffset*rate) - offsetTicks);
-}
-
-void
-AudioBufferSourceNode::Stop(double aWhen, ErrorResult& aRv)
-{
- if (!mStartCalled) {
- aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
- return;
- }
-
- AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
- if (!ns) {
- // We've already stopped and had our stream shut down
- return;
- }
-
- ns->SetStreamTimeParameter(AudioBufferSourceNodeEngine::STOP,
- Context()->DestinationStream(),
- std::max(0.0, aWhen));
-}
-
-void
-AudioBufferSourceNode::NotifyMainThreadStateChanged()
-{
- if (mStream->IsFinished()) {
- SetProduceOwnOutput(false);
- }
+ mBuffer = aBuffer;
}
}
}
+
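
To make the removed Start() concrete, here is its parameter mapping worked through with assumed values (rate 44100 Hz, a 2.0 s buffer, aOffset = 0.5, aDuration = 1.0):

```cpp
// Worked example of the removed Start() arithmetic; all inputs assumed.
//   lengthSamples = 88200, rate = 44100  =>  length    = 2.0 s
//   offset    = max(0.0, 0.5)            =  0.5 s
//   endOffset = min(0.5 + 1.0, 2.0)      =  1.5 s
//   offsetTicks        = NS_lround(0.5 * 44100)         = 22050
//   DURATION parameter = NS_lround(1.5 * 44100) - 22050 = 44100
// i.e. the engine plays buffer samples [22050, 66150).
```
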
diff --git a/content/media/webaudio/AudioBufferSourceNode.h b/content/media/webaudio/AudioBufferSourceNode.h
index 9836b6529ae0..b305f85c84e5 100644
--- a/content/media/webaudio/AudioBufferSourceNode.h
+++ b/content/media/webaudio/AudioBufferSourceNode.h
@@ -9,53 +9,31 @@
#include "AudioSourceNode.h"
#include "AudioBuffer.h"
-#include "mozilla/dom/BindingUtils.h"
namespace mozilla {
namespace dom {
-class AudioBufferSourceNode : public AudioSourceNode,
- public MainThreadMediaStreamListener
+class AudioBufferSourceNode : public AudioSourceNode
{
public:
explicit AudioBufferSourceNode(AudioContext* aContext);
- virtual ~AudioBufferSourceNode();
-
- virtual void DestroyMediaStream() MOZ_OVERRIDE
- {
- if (mStream) {
- mStream->RemoveMainThreadListener(this);
- }
- AudioSourceNode::DestroyMediaStream();
- }
- virtual bool SupportsMediaStreams() const MOZ_OVERRIDE
- {
- return true;
- }
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioBufferSourceNode, AudioSourceNode)
virtual JSObject* WrapObject(JSContext* aCx, JSObject* aScope);
- void Start(JSContext* aCx, double aWhen, double aOffset,
- const Optional<double>& aDuration, ErrorResult& aRv);
- void Stop(double aWhen, ErrorResult& aRv);
+ void Start(double) { /* no-op for now */ }
+ void Stop(double) { /* no-op for now */ }
AudioBuffer* GetBuffer() const
{
return mBuffer;
}
- void SetBuffer(AudioBuffer* aBuffer)
- {
- mBuffer = aBuffer;
- }
-
- virtual void NotifyMainThreadStateChanged() MOZ_OVERRIDE;
+ void SetBuffer(AudioBuffer* aBuffer);
private:
nsRefPtr<AudioBuffer> mBuffer;
- bool mStartCalled;
};
}
diff --git a/content/media/webaudio/AudioContext.cpp b/content/media/webaudio/AudioContext.cpp
index fadb11af4bf1..b52bff46f1aa 100644
--- a/content/media/webaudio/AudioContext.cpp
+++ b/content/media/webaudio/AudioContext.cpp
@@ -8,7 +8,6 @@
#include "nsContentUtils.h"
#include "nsIDOMWindow.h"
#include "mozilla/ErrorResult.h"
-#include "MediaStreamGraph.h"
#include "AudioDestinationNode.h"
#include "AudioBufferSourceNode.h"
#include "AudioBuffer.h"
@@ -29,14 +28,11 @@ NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_3(AudioContext,
NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(AudioContext, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(AudioContext, Release)
-static uint8_t gWebAudioOutputKey;
-
AudioContext::AudioContext(nsIDOMWindow* aWindow)
: mWindow(aWindow)
- , mDestination(new AudioDestinationNode(this, MediaStreamGraph::GetInstance()))
+ , mDestination(new AudioDestinationNode(this))
+ , mSampleRate(44100) // hard-code for now
{
- // Actually play audio
- mDestination->Stream()->AddAudioOutput(&gWebAudioOutputKey);
SetIsDOMBinding();
}
@@ -79,18 +75,11 @@ AudioContext::CreateBuffer(JSContext* aJSContext, uint32_t aNumberOfChannels,
uint32_t aLength, float aSampleRate,
ErrorResult& aRv)
{
- if (aLength > INT32_MAX) {
- aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
- return nullptr;
- }
-
- nsRefPtr<AudioBuffer> buffer =
- new AudioBuffer(this, int32_t(aLength), aSampleRate);
+ nsRefPtr<AudioBuffer> buffer = new AudioBuffer(this, aLength, aSampleRate);
if (!buffer->InitializeBuffers(aNumberOfChannels, aJSContext)) {
aRv.Throw(NS_ERROR_OUT_OF_MEMORY);
return nullptr;
}
-
return buffer.forget();
}
@@ -175,17 +164,6 @@ AudioContext::RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob)
mDecodeJobs.RemoveElement(aDecodeJob);
}
-MediaStreamGraph*
-AudioContext::Graph() const
-{
- return Destination()->Stream()->Graph();
+}
}
-MediaStream*
-AudioContext::DestinationStream() const
-{
- return Destination()->Stream();
-}
-
-}
-}
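
The reverted CreateBuffer keeps the fallible-allocation pattern visible above: InitializeBuffers reports failure through its bool return, which the DOM method converts to NS_ERROR_OUT_OF_MEMORY. A minimal free-standing sketch of the same shape (the helper name is hypothetical):

```cpp
// Hypothetical helper restating the CreateBuffer pattern above.
already_AddRefed<AudioBuffer>
CreateBufferChecked(AudioContext* aContext, JSContext* aJSContext,
                    uint32_t aChannels, uint32_t aLength, uint32_t aRate,
                    ErrorResult& aRv)
{
  nsRefPtr<AudioBuffer> buffer = new AudioBuffer(aContext, aLength, aRate);
  if (!buffer->InitializeBuffers(aChannels, aJSContext)) {
    aRv.Throw(NS_ERROR_OUT_OF_MEMORY); // FallibleTArray allocation failed
    return nullptr;
  }
  return buffer.forget();
}
```
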
diff --git a/content/media/webaudio/AudioContext.h b/content/media/webaudio/AudioContext.h
index 0668423a611b..d365ee7cbde1 100644
--- a/content/media/webaudio/AudioContext.h
+++ b/content/media/webaudio/AudioContext.h
@@ -17,9 +17,6 @@
#include "mozilla/dom/BindingUtils.h"
#include "mozilla/dom/AudioContextBinding.h"
#include "MediaBufferDecoder.h"
-#include "StreamBuffer.h"
-#include "MediaStreamGraph.h"
-#include "nsIDOMWindow.h"
struct JSContext;
class JSObject;
@@ -47,9 +44,10 @@ class AudioContext MOZ_FINAL : public nsWrapperCache,
public EnableWebAudioCheck
{
explicit AudioContext(nsIDOMWindow* aParentWindow);
- ~AudioContext();
public:
+ virtual ~AudioContext();
+
NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(AudioContext)
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(AudioContext)
@@ -76,7 +74,7 @@ public:
float SampleRate() const
{
- return float(IdealAudioRate());
+ return mSampleRate;
}
AudioListener* Listener();
@@ -107,11 +105,6 @@ public:
DecodeSuccessCallback& aSuccessCallback,
const Optional<OwningNonNull<DecodeErrorCallback> >& aFailureCallback);
- uint32_t GetRate() const { return IdealAudioRate(); }
-
- MediaStreamGraph* Graph() const;
- MediaStream* DestinationStream() const;
-
private:
void RemoveFromDecodeQueue(WebAudioDecodeJob* aDecodeJob);
@@ -123,6 +116,7 @@ private:
nsRefPtr<AudioListener> mListener;
MediaBufferDecoder mDecoder;
nsTArray<nsAutoPtr<WebAudioDecodeJob> > mDecodeJobs;
+ float mSampleRate;
};
}
diff --git a/content/media/webaudio/AudioDestinationNode.cpp b/content/media/webaudio/AudioDestinationNode.cpp
index baf2149a5eea..1db7fd58eedf 100644
--- a/content/media/webaudio/AudioDestinationNode.cpp
+++ b/content/media/webaudio/AudioDestinationNode.cpp
@@ -6,9 +6,6 @@
#include "AudioDestinationNode.h"
#include "mozilla/dom/AudioDestinationNodeBinding.h"
-#include "AudioNodeEngine.h"
-#include "AudioNodeStream.h"
-#include "MediaStreamGraph.h"
#include "nsContentUtils.h"
namespace mozilla {
@@ -24,10 +21,9 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
NS_IMPL_CYCLE_COLLECTION_TRACE_WRAPPERCACHE(AudioDestinationNode)
-AudioDestinationNode::AudioDestinationNode(AudioContext* aContext, MediaStreamGraph* aGraph)
+AudioDestinationNode::AudioDestinationNode(AudioContext* aContext)
: AudioNode(aContext)
{
- mStream = aGraph->CreateAudioNodeStream(new AudioNodeEngine());
SetIsDOMBinding();
}
diff --git a/content/media/webaudio/AudioDestinationNode.h b/content/media/webaudio/AudioDestinationNode.h
index 94bf2d8a79ab..c2baeb7bea03 100644
--- a/content/media/webaudio/AudioDestinationNode.h
+++ b/content/media/webaudio/AudioDestinationNode.h
@@ -22,7 +22,7 @@ class AudioDestinationNode : public AudioNode,
public nsWrapperCache
{
public:
- AudioDestinationNode(AudioContext* aContext, MediaStreamGraph* aGraph);
+ explicit AudioDestinationNode(AudioContext* aContext);
NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS_INHERITED(AudioDestinationNode,
diff --git a/content/media/webaudio/AudioNode.cpp b/content/media/webaudio/AudioNode.cpp
index 424fe98c0d54..b3f047e82b7c 100644
--- a/content/media/webaudio/AudioNode.cpp
+++ b/content/media/webaudio/AudioNode.cpp
@@ -8,7 +8,6 @@
#include "AudioContext.h"
#include "nsContentUtils.h"
#include "mozilla/ErrorResult.h"
-#include "AudioNodeStream.h"
namespace mozilla {
namespace dom {
@@ -41,31 +40,16 @@ NS_INTERFACE_MAP_END
AudioNode::AudioNode(AudioContext* aContext)
: mContext(aContext)
- , mJSBindingFinalized(false)
- , mCanProduceOwnOutput(false)
- , mOutputEnded(false)
{
MOZ_ASSERT(aContext);
}
AudioNode::~AudioNode()
{
- DestroyMediaStream();
MOZ_ASSERT(mInputNodes.IsEmpty());
MOZ_ASSERT(mOutputNodes.IsEmpty());
}
-static uint32_t
-FindIndexOfNode(const nsTArray<AudioNode::InputNode>& aInputNodes, const AudioNode* aNode)
-{
- for (uint32_t i = 0; i < aInputNodes.Length(); ++i) {
- if (aInputNodes[i].mInputNode == aNode) {
- return i;
- }
- }
- return nsTArray<AudioNode::InputNode>::NoIndex;
-}
-
static uint32_t
FindIndexOfNodeWithPorts(const nsTArray<AudioNode::InputNode>& aInputNodes, const AudioNode* aNode,
uint32_t aInputPort, uint32_t aOutputPort)
@@ -80,54 +64,6 @@ FindIndexOfNodeWithPorts(const nsTArray<AudioNode::InputNode>& aInputNodes, cons
return nsTArray<AudioNode::InputNode>::NoIndex;
}
-void
-AudioNode::UpdateOutputEnded()
-{
- if (mOutputEnded) {
- // Already ended, so nothing to do.
- return;
- }
- if (mCanProduceOwnOutput ||
- !mInputNodes.IsEmpty() ||
- (!mJSBindingFinalized && NumberOfInputs() > 0)) {
- // This node could still produce output in the future.
- return;
- }
-
- mOutputEnded = true;
-
- // Addref this temporarily so the refcount bumping below doesn't destroy us
- // prematurely
- nsRefPtr<AudioNode> kungFuDeathGrip = this;
-
- // The idea here is that we remove connections one by one, and at each step
- // the graph is in a valid state.
-
- // Disconnect inputs. We don't need them anymore.
- while (!mInputNodes.IsEmpty()) {
- uint32_t i = mInputNodes.Length() - 1;
- nsRefPtr<AudioNode> input = mInputNodes[i].mInputNode.forget();
- mInputNodes.RemoveElementAt(i);
- NS_ASSERTION(input->mOutputNodes.Contains(this), "input/output inconsistency");
- input->mOutputNodes.RemoveElement(this);
- }
-
- while (!mOutputNodes.IsEmpty()) {
- uint32_t i = mOutputNodes.Length() - 1;
- nsRefPtr<AudioNode> output = mOutputNodes[i].forget();
- mOutputNodes.RemoveElementAt(i);
- uint32_t inputIndex = FindIndexOfNode(output->mInputNodes, this);
- NS_ASSERTION(inputIndex != nsTArray<AudioNode::InputNode>::NoIndex, "input/output inconsistency");
- // It doesn't matter which one we remove, since we're going to remove all
- // entries for this node anyway.
- output->mInputNodes.RemoveElementAt(inputIndex);
-
- output->UpdateOutputEnded();
- }
-
- DestroyMediaStream();
-}
-
void
AudioNode::Connect(AudioNode& aDestination, uint32_t aOutput,
uint32_t aInput, ErrorResult& aRv)
@@ -143,11 +79,6 @@ AudioNode::Connect(AudioNode& aDestination, uint32_t aOutput,
return;
}
- if (IsOutputEnded() || aDestination.IsOutputEnded()) {
- // No need to connect since we're not going to produce anything other
- // than silence.
- return;
- }
if (FindIndexOfNodeWithPorts(aDestination.mInputNodes, this, aInput, aOutput) !=
nsTArray<AudioNode::InputNode>::NoIndex) {
// connection already exists.
@@ -165,14 +96,6 @@ AudioNode::Connect(AudioNode& aDestination, uint32_t aOutput,
input->mInputNode = this;
input->mInputPort = aInput;
input->mOutputPort = aOutput;
- if (SupportsMediaStreams() && aDestination.mStream) {
- // Connect streams in the MediaStreamGraph
- MOZ_ASSERT(aDestination.mStream->AsProcessedStream());
- ProcessedMediaStream* ps =
- static_cast<ProcessedMediaStream*>(aDestination.mStream.get());
- input->mStreamPort =
- ps->AllocateInputPort(mStream, MediaInputPort::FLAG_BLOCK_OUTPUT);
- }
}
void
@@ -202,10 +125,6 @@ AudioNode::Disconnect(uint32_t aOutput, ErrorResult& aRv)
}
}
}
-
- for (uint32_t i = 0; i < outputsToUpdate.Length(); ++i) {
- outputsToUpdate[i]->UpdateOutputEnded();
- }
}
}
diff --git a/content/media/webaudio/AudioNode.h b/content/media/webaudio/AudioNode.h
index 82bb073ae288..fa361ac37187 100644
--- a/content/media/webaudio/AudioNode.h
+++ b/content/media/webaudio/AudioNode.h
@@ -13,7 +13,6 @@
#include "nsAutoPtr.h"
#include "nsTArray.h"
#include "AudioContext.h"
-#include "MediaStreamGraph.h"
struct JSContext;
@@ -23,25 +22,6 @@ class ErrorResult;
namespace dom {
-/**
- * The DOM object representing a Web Audio AudioNode.
- *
- * Each AudioNode has a MediaStream representing the actual
- * real-time processing and output of this AudioNode.
- *
- * We track the incoming and outgoing connections to other AudioNodes.
- * All connections are strong and thus rely on cycle collection to break them.
- * However, we also track whether an AudioNode is capable of producing output
- * in the future. If it isn't, then we break its connections to its inputs
- * and outputs, allowing nodes to be immediately disconnected. This
- * disconnection is done internally, invisible to DOM users.
- *
- * We say that a node cannot produce output in the future if it has no inputs
- * that can, and it is not producing output itself without any inputs, and
- * either it can never have any inputs or it has no JS wrapper. (If it has a
- * JS wrapper and can accept inputs, then a new input could be added in
- * the future.)
- */
class AudioNode : public nsISupports,
public EnableWebAudioCheck
{
@@ -49,34 +29,9 @@ public:
explicit AudioNode(AudioContext* aContext);
virtual ~AudioNode();
- // This should be idempotent (safe to call multiple times).
- // This should be called in the destructor of every class that overrides
- // this method.
- virtual void DestroyMediaStream()
- {
- if (mStream) {
- mStream->Destroy();
- mStream = nullptr;
- }
- }
-
- // This method should be overridden to return true in nodes
- // which support being hooked up to the Media Stream graph.
- virtual bool SupportsMediaStreams() const
- {
- return false;
- }
-
NS_DECL_CYCLE_COLLECTING_ISUPPORTS
NS_DECL_CYCLE_COLLECTION_CLASS(AudioNode)
- void JSBindingFinalized()
- {
- NS_ASSERTION(!mJSBindingFinalized, "JS binding already finalized");
- mJSBindingFinalized = true;
- UpdateOutputEnded();
- }
-
AudioContext* GetParentObject() const
{
return mContext;
@@ -98,49 +53,19 @@ public:
virtual uint32_t NumberOfInputs() const { return 1; }
virtual uint32_t NumberOfOutputs() const { return 1; }
- // This could possibly delete 'this'.
- void UpdateOutputEnded();
- bool IsOutputEnded() const { return mOutputEnded; }
-
struct InputNode {
- ~InputNode()
- {
- if (mStreamPort) {
- mStreamPort->Destroy();
- }
- }
-
// Strong reference.
// May be null if the source node has gone away.
nsRefPtr<AudioNode> mInputNode;
- nsRefPtr<MediaInputPort> mStreamPort;
// The index of the input port this node feeds into.
uint32_t mInputPort;
// The index of the output port this node comes out of.
uint32_t mOutputPort;
};
- MediaStream* Stream() { return mStream; }
-
- // Set this to true when the node can produce its own output even if there
- // are no inputs.
- void SetProduceOwnOutput(bool aCanProduceOwnOutput)
- {
- mCanProduceOwnOutput = aCanProduceOwnOutput;
- if (!aCanProduceOwnOutput) {
- UpdateOutputEnded();
- }
- }
-
private:
nsRefPtr<AudioContext> mContext;
-protected:
- // Must be set in the constructor. Must not be null.
- // If MaxNumberOfInputs() is > 0, then mStream must be a ProcessedMediaStream.
- nsRefPtr<MediaStream> mStream;
-
-private:
// For every InputNode, there is a corresponding entry in mOutputNodes of the
// InputNode's mInputNode.
nsTArray<InputNode> mInputNodes;
@@ -149,15 +74,6 @@ private:
// exact matching entry, since mOutputNodes doesn't include the port
// identifiers and the same node could be connected on multiple ports.
nsTArray<nsRefPtr<AudioNode> > mOutputNodes;
- // True if the JS binding has been finalized (so script no longer has
- // a reference to this node).
- bool mJSBindingFinalized;
- // True if this node can produce its own output even when all inputs
- // have ended their output.
- bool mCanProduceOwnOutput;
- // True if this node can never produce anything except silence in the future.
- // Updated by UpdateOutputEnded().
- bool mOutputEnded;
};
}
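
Tying the deleted AudioNode.h comment block to the deleted AudioNode.cpp logic: UpdateOutputEnded() implemented exactly the liveness rule the comment describes. A condensed sketch of that predicate (the method name is invented for illustration):

```cpp
// Condensed from the removed UpdateOutputEnded(): a node is declared
// permanently silent only when every escape hatch below is closed.
bool AudioNode::CanStillProduceOutput() const // illustrative name
{
  return mCanProduceOwnOutput ||      // a source that is still running
         !mInputNodes.IsEmpty() ||    // a live input may still feed it
         (!mJSBindingFinalized &&     // script still holds the wrapper...
          NumberOfInputs() > 0);      // ...and could connect a new input
}
// Once this is false, the removed code unlinked input and output edges
// one at a time (keeping the graph consistent at every step), then
// called DestroyMediaStream().
```
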
diff --git a/content/media/webaudio/BiquadFilterNode.cpp b/content/media/webaudio/BiquadFilterNode.cpp
index 6db244f0f7ac..3d91d6956b0d 100644
--- a/content/media/webaudio/BiquadFilterNode.cpp
+++ b/content/media/webaudio/BiquadFilterNode.cpp
@@ -22,7 +22,9 @@ NS_IMPL_RELEASE_INHERITED(BiquadFilterNode, AudioNode)
static float
Nyquist(AudioContext* aContext)
{
- return 0.5f * aContext->SampleRate();
+ // TODO: Replace the hardcoded 44100 here with AudioContext::SampleRate()
+ // when we implement that.
+ return 0.5f * 44100;
}
BiquadFilterNode::BiquadFilterNode(AudioContext* aContext)
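
The interim constant keeps the node consistent with the hard-coded context rate: at 44100 Hz the Nyquist frequency is 0.5 × 44100 = 22050 Hz, which is the frequency.maxValue the updated test below asserts.

```cpp
// Sanity check of the hard-coded value against the updated test.
static_assert(0.5 * 44100 == 22050.0, "Nyquist at the 44.1 kHz interim rate");
```
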
diff --git a/content/media/webaudio/test/test_AudioBuffer.html b/content/media/webaudio/test/test_AudioBuffer.html
index cd7112a915b5..b60b7fb7ca28 100644
--- a/content/media/webaudio/test/test_AudioBuffer.html
+++ b/content/media/webaudio/test/test_AudioBuffer.html
@@ -12,13 +12,13 @@
SimpleTest.waitForExplicitFinish();
addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
- var context = new AudioContext();
- var buffer = context.createBuffer(2, 2048, context.sampleRate);
+ var ac = new AudioContext();
+ var buffer = ac.createBuffer(2, 2048, 44100);
SpecialPowers.gc(); // Make sure that our channels are accessible after GC
ok(buffer, "Buffer was allocated successfully");
- is(buffer.sampleRate, context.sampleRate, "Correct sample rate");
+ is(buffer.sampleRate, 44100, "Correct sample rate");
is(buffer.length, 2048, "Correct length");
- ok(Math.abs(buffer.duration - 2048 / context.sampleRate) < 0.0001, "Correct duration");
+ ok(Math.abs(buffer.duration - 2048 / 44100) < 0.0001, "Correct duration");
is(buffer.numberOfChannels, 2, "Correct number of channels");
for (var i = 0; i < buffer.numberOfChannels; ++i) {
var buf = buffer.getChannelData(i);
diff --git a/content/media/webaudio/test/test_AudioContext.html b/content/media/webaudio/test/test_AudioContext.html
index c5c1eb65a4bc..b89f2c92dcfe 100644
--- a/content/media/webaudio/test/test_AudioContext.html
+++ b/content/media/webaudio/test/test_AudioContext.html
@@ -22,7 +22,7 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var ac = new AudioContext();
ok(ac, "Create a AudioContext object");
- is(ac.sampleRate, 48000, "Correct sample rate");
+ is(ac.sampleRate, 44100, "Correct sample rate");
SpecialPowers.clearUserPref("media.webaudio.enabled");
SimpleTest.finish();
});
diff --git a/content/media/webaudio/test/test_biquadFilterNode.html b/content/media/webaudio/test/test_biquadFilterNode.html
index de6e029cb575..63efe926c4df 100644
--- a/content/media/webaudio/test/test_biquadFilterNode.html
+++ b/content/media/webaudio/test/test_biquadFilterNode.html
@@ -19,9 +19,9 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
- var buffer = context.createBuffer(1, 2048, context.sampleRate);
+ var buffer = context.createBuffer(1, 2048, 44100);
for (var i = 0; i < 2048; ++i) {
- buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+ buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / 44100);
}
var destination = context.destination;
@@ -38,7 +38,7 @@ addLoadEvent(function() {
// Verify default values
is(filter.type, 0, "Correct default value for type");
near(filter.frequency.minValue, 10, "Correct min value for filter frequency");
- near(filter.frequency.maxValue, context.sampleRate/2, "Correct max value for filter frequency");
+ near(filter.frequency.maxValue, 22050, "Correct max value for filter frequency");
near(filter.frequency.defaultValue, 350, "Correct default value for filter frequency");
near(filter.Q.minValue, 0.001, "Correct min value for filter Q");
near(filter.Q.maxValue, 1000, "Correct max value for filter Q");
diff --git a/content/media/webaudio/test/test_decodeAudioData.html b/content/media/webaudio/test/test_decodeAudioData.html
index f7a0b6ed182f..f881ec155d56 100644
--- a/content/media/webaudio/test/test_decodeAudioData.html
+++ b/content/media/webaudio/test/test_decodeAudioData.html
@@ -254,13 +254,8 @@ expectTypeError(function() {
cx.decodeAudioData(new Uint8Array(100), callbackShouldNeverRun, callbackShouldNeverRun);
});
-if (cx.sampleRate == 44100) {
- // Now, let's get real!
- runNextTest();
-} else {
- todo(false, "Decoded data tests disabled; context sampleRate " + cx.sampleRate + " not supported");
- SimpleTest.finish();
-}
+// Now, let's get real!
+runNextTest();
diff --git a/content/media/webaudio/test/test_delayNode.html b/content/media/webaudio/test/test_delayNode.html
index 9a7721fc5d09..76a5bb35994b 100644
--- a/content/media/webaudio/test/test_delayNode.html
+++ b/content/media/webaudio/test/test_delayNode.html
@@ -15,9 +15,9 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
- var buffer = context.createBuffer(1, 2048, context.sampleRate);
+ var buffer = context.createBuffer(1, 2048, 44100);
for (var i = 0; i < 2048; ++i) {
- buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+ buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / 44100);
}
var destination = context.destination;
diff --git a/content/media/webaudio/test/test_dynamicsCompressorNode.html b/content/media/webaudio/test/test_dynamicsCompressorNode.html
index 96266647eab0..97ec916a4775 100644
--- a/content/media/webaudio/test/test_dynamicsCompressorNode.html
+++ b/content/media/webaudio/test/test_dynamicsCompressorNode.html
@@ -18,9 +18,9 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
- var buffer = context.createBuffer(1, 2048, context.sampleRate);
+ var buffer = context.createBuffer(1, 2048, 44100);
for (var i = 0; i < 2048; ++i) {
- buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+ buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / 44100);
}
var destination = context.destination;
diff --git a/content/media/webaudio/test/test_gainNode.html b/content/media/webaudio/test/test_gainNode.html
index ebec5c4ccfb0..55ffdba7cc4a 100644
--- a/content/media/webaudio/test/test_gainNode.html
+++ b/content/media/webaudio/test/test_gainNode.html
@@ -14,9 +14,9 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
- var buffer = context.createBuffer(1, 2048, context.sampleRate);
+ var buffer = context.createBuffer(1, 2048, 44100);
for (var i = 0; i < 2048; ++i) {
- buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+ buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / 44100);
}
var destination = context.destination;
diff --git a/content/media/webaudio/test/test_pannerNode.html b/content/media/webaudio/test/test_pannerNode.html
index b531ebe7a8fc..4dbf16d509b3 100644
--- a/content/media/webaudio/test/test_pannerNode.html
+++ b/content/media/webaudio/test/test_pannerNode.html
@@ -18,9 +18,9 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
- var buffer = context.createBuffer(1, 2048, context.sampleRate);
+ var buffer = context.createBuffer(1, 2048, 44100);
for (var i = 0; i < 2048; ++i) {
- buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+ buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / 44100);
}
var destination = context.destination;
diff --git a/content/media/webaudio/test/test_singleSourceDest.html b/content/media/webaudio/test/test_singleSourceDest.html
index b15a5a59a35d..87e3ef295b74 100644
--- a/content/media/webaudio/test/test_singleSourceDest.html
+++ b/content/media/webaudio/test/test_singleSourceDest.html
@@ -14,9 +14,9 @@ addLoadEvent(function() {
SpecialPowers.setBoolPref("media.webaudio.enabled", true);
var context = new AudioContext();
- var buffer = context.createBuffer(1, 2048, context.sampleRate);
+ var buffer = context.createBuffer(1, 2048, 44100);
for (var i = 0; i < 2048; ++i) {
- buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / context.sampleRate);
+ buffer.getChannelData(0)[i] = Math.sin(440 * 2 * Math.PI * i / 44100);
}
var destination = context.destination;
diff --git a/dom/bindings/Bindings.conf b/dom/bindings/Bindings.conf
index e34800f84030..32bd65d252f8 100644
--- a/dom/bindings/Bindings.conf
+++ b/dom/bindings/Bindings.conf
@@ -98,7 +98,6 @@ DOMInterfaces = {
},
'AudioBufferSourceNode': {
- 'implicitJSContext': [ 'start' ],
'wrapperCache': False
},
diff --git a/dom/webidl/AudioBufferSourceNode.webidl b/dom/webidl/AudioBufferSourceNode.webidl
index 158c73e5649d..2099c83a471d 100644
--- a/dom/webidl/AudioBufferSourceNode.webidl
+++ b/dom/webidl/AudioBufferSourceNode.webidl
@@ -27,9 +27,9 @@ interface AudioBufferSourceNode : AudioSourceNode {
//attribute AudioParam playbackRate;
//attribute boolean loop;
- [Throws]
- void start(optional double when = 0, optional double grainOffset = 0,
- optional double grainDuration);
- [Throws]
- void stop(optional double when = 0);
+ void start(double when);
+ //void start(double when, double grainOffset, double grainDuration);
+ void stop(double when);
+
};
+
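
For readers mapping the IDL back to the C++ above: in this era of the bindings, `optional double grainDuration` (no default) surfaces as `const Optional<double>&`, `[Throws]` adds a trailing `ErrorResult&`, and the `implicitJSContext` entry removed from Bindings.conf is what supplied the leading `JSContext*`. That is why dropping `[Throws]` and the optionals lets the interim start/stop collapse to the one-argument no-ops in AudioBufferSourceNode.h. A sketch of the correspondence (both C++ signatures appear verbatim in this patch):

```cpp
// [Throws] void start(optional double when = 0,
//                     optional double grainOffset = 0,
//                     optional double grainDuration);
// with 'implicitJSContext' in Bindings.conf becomes:
void Start(JSContext* aCx, double aWhen, double aOffset,
           const Optional<double>& aDuration, ErrorResult& aRv);

// void start(double when);   // no [Throws], no optionals
void Start(double);           // interim no-op; nothing can throw
```
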