Backed out 9 changesets (bug 1170958) for frequent test_getUserMedia_addTrackRemoveTrack.html failures

Backed out changeset 277c1f8098d1 (bug 1170958)
Backed out changeset aa86bb9eea95 (bug 1170958)
Backed out changeset 8af8b85a4b26 (bug 1170958)
Backed out changeset ec1bf225e9cb (bug 1170958)
Backed out changeset 4a04ddca2b6b (bug 1170958)
Backed out changeset e85c9977a311 (bug 1170958)
Backed out changeset 16b40ff04e8f (bug 1170958)
Backed out changeset ad206925c84a (bug 1170958)
Backed out changeset 2106eccec79b (bug 1170958)
This commit is contained in:
Wes Kocher 2015-09-25 13:08:55 -07:00
Parent b4a25c9f58
Commit 03a1803261
41 changed files with 373 additions and 909 deletions

View file

@@ -42,6 +42,7 @@ class CameraPreviewMediaStream : public MediaStream
 public:
   explicit CameraPreviewMediaStream(DOMMediaStream* aWrapper);
+  virtual CameraPreviewMediaStream* AsCameraPreviewStream() override { return this; };

   virtual void AddAudioOutput(void* aKey) override;
   virtual void SetAudioOutputVolume(void* aKey, float aVolume) override;
   virtual void RemoveAudioOutput(void* aKey) override;

View file

@@ -279,13 +279,8 @@ nsDOMCameraControl::nsDOMCameraControl(uint32_t aCameraId,
 #endif
   mCurrentConfiguration = initialConfig.forget();

-  // Register the playback listener directly on the camera input stream.
-  // We want as low latency as possible for the camera, thus avoiding
-  // MediaStreamGraph altogether. Don't do the regular InitStreamCommon()
-  // to avoid initializing the Owned and Playback streams. This is OK since
-  // we are not user/DOM facing anyway.
-  CreateAndAddPlaybackStreamListener(mInput);
+  // Attach our DOM-facing media stream to our viewfinder stream.
+  InitStreamCommon(mInput);

   MOZ_ASSERT(mWindow, "Shouldn't be created with a null window!");
   if (mWindow->GetExtantDoc()) {
     CombineWithPrincipal(mWindow->GetExtantDoc()->NodePrincipal());
@@ -329,11 +324,6 @@ nsDOMCameraControl::~nsDOMCameraControl()
   DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
   /*invoke DOMMediaStream destroy*/
   Destroy();
-  if (mInput) {
-    mInput->Destroy();
-    mInput = nullptr;
-  }
 }

 JSObject*
@@ -469,12 +459,6 @@ nsDOMCameraControl::Get(uint32_t aKey, nsTArray<CameraRegion>& aValue)
   return NS_OK;
 }

-MediaStream*
-nsDOMCameraControl::GetCameraStream() const
-{
-  return mInput;
-}
-
 #define THROW_IF_NO_CAMERACONTROL(...) \
   do { \
     if (!mCameraControl) { \

View file

@@ -71,8 +71,6 @@ public:
   nsPIDOMWindow* GetParentObject() const { return mWindow; }

-  MediaStream* GetCameraStream() const override;
-
   // Attributes.
   void GetEffect(nsString& aEffect, ErrorResult& aRv);
   void SetEffect(const nsAString& aEffect, ErrorResult& aRv);

View file

@@ -542,7 +542,7 @@ HTMLCanvasElement::CaptureStream(const Optional<double>& aFrameRate,
     return nullptr;
   }

-  stream->CreateOwnDOMTrack(videoTrackId, MediaSegment::VIDEO);
+  stream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO);
   RegisterFrameCaptureListener(stream->FrameCaptureListener());
   return stream.forget();
 }

View file

@@ -561,7 +561,7 @@ HTMLMediaElement::GetMozMediaSourceObject() const
 already_AddRefed<DOMMediaStream>
 HTMLMediaElement::GetSrcObject() const
 {
-  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetPlaybackStream(),
+  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetStream(),
                "MediaStream should have been set up properly");
   nsRefPtr<DOMMediaStream> stream = mSrcAttrStream;
   return stream.forget();
@@ -585,7 +585,7 @@ HTMLMediaElement::SetSrcObject(DOMMediaStream* aValue)
 already_AddRefed<DOMMediaStream>
 HTMLMediaElement::GetMozSrcObject() const
 {
-  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetPlaybackStream(),
+  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetStream(),
                "MediaStream should have been set up properly");
   nsRefPtr<DOMMediaStream> stream = mSrcAttrStream;
   return stream.forget();
@@ -979,8 +979,6 @@ void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
     return;
   }

-  LOG(LogLevel::Debug, ("MediaElement %p MediaStreamTrack %p enabled", this));
-
   // TODO: We are dealing with single audio track and video track for now.
   if (AudioTrack* track = aTrack->AsAudioTrack()) {
     if (!track->Enabled()) {
@@ -999,8 +997,6 @@ void HTMLMediaElement::NotifyMediaStreamTracksAvailable(DOMMediaStream* aStream)
     return;
   }

-  LOG(LogLevel::Debug, ("MediaElement %p MediaStream tracks available", this));
-
   bool videoHasChanged = IsVideo() && HasVideo() != !VideoTracks()->IsEmpty();

   if (videoHasChanged) {
@@ -1880,17 +1876,17 @@ HTMLMediaElement::CaptureStreamInternal(bool aFinishWhenEnded,
   mAudioCaptured = true;
   if (mDecoder) {
-    mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
+    mDecoder->AddOutputStream(out->mStream->GetStream()->AsProcessedStream(),
                               aFinishWhenEnded);
     if (mReadyState >= HAVE_METADATA) {
       // Expose the tracks to JS directly.
       if (HasAudio()) {
         TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
-        out->mStream->CreateOwnDOMTrack(audioTrackId, MediaSegment::AUDIO);
+        out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO);
       }
       if (HasVideo()) {
         TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
-        out->mStream->CreateOwnDOMTrack(videoTrackId, MediaSegment::VIDEO);
+        out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO);
       }
     }
   }
@@ -2450,7 +2446,7 @@ bool HTMLMediaElement::ParseAttribute(int32_t aNamespaceID,
       mAudioChannel = audioChannel;
       if (mSrcStream) {
-        nsRefPtr<MediaStream> stream = GetSrcMediaStream();
+        nsRefPtr<MediaStream> stream = mSrcStream->GetStream();
         if (stream) {
           stream->SetAudioChannelType(mAudioChannel);
         }
@@ -2865,7 +2861,7 @@ nsresult HTMLMediaElement::FinishDecoderSetup(MediaDecoder* aDecoder,
   for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
     OutputMediaStream* ms = &mOutputStreams[i];
-    aDecoder->AddOutputStream(ms->mStream->GetInputStream()->AsProcessedStream(),
+    aDecoder->AddOutputStream(ms->mStream->GetStream()->AsProcessedStream(),
                               ms->mFinishWhenEnded);
   }
@@ -3057,9 +3053,7 @@ public:
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const MediaSegment& aQueuedMedia,
-                                        MediaStream* aInputStream,
-                                        TrackID aInputTrackID) override
+                                        const MediaSegment& aQueuedMedia) override
   {
     MutexAutoLock lock(mMutex);
     if (mInitialSize != gfx::IntSize(0,0) ||
@@ -3109,10 +3103,10 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
   if (!mSrcStream) {
     return;
   }
-  // We might be in cycle collection with mSrcStream->GetPlaybackStream() already
+  // We might be in cycle collection with mSrcStream->GetStream() already
   // returning null due to unlinking.
-  MediaStream* stream = GetSrcMediaStream();
+  MediaStream* stream = mSrcStream->GetStream();
   bool shouldPlay = !(aFlags & REMOVING_SRC_STREAM) && !mPaused &&
                     !mPausedForInactiveDocumentOrChannel && stream;
   if (shouldPlay == mSrcStreamIsPlaying) {
@@ -3120,10 +3114,6 @@ void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
   }
   mSrcStreamIsPlaying = shouldPlay;

-  LOG(LogLevel::Debug, ("MediaElement %p %s playback of DOMMediaStream %p",
-                        this, shouldPlay ? "Setting up" : "Removing",
-                        mSrcStream.get()));
   if (shouldPlay) {
     mSrcStreamPausedCurrentTime = -1;
@@ -3191,7 +3181,7 @@ void HTMLMediaElement::SetupSrcMediaStreamPlayback(DOMMediaStream* aStream)
     return;
   }
-  nsRefPtr<MediaStream> stream = GetSrcMediaStream();
+  nsRefPtr<MediaStream> stream = mSrcStream->GetStream();
   if (stream) {
     stream->SetAudioChannelType(mAudioChannel);
   }
@@ -3286,6 +3276,18 @@ void HTMLMediaElement::MetadataLoaded(const MediaInfo* aInfo,
 #endif // MOZ_EME
   }

+  // Expose the tracks to JS directly.
+  for (OutputMediaStream& out : mOutputStreams) {
+    if (aInfo->HasAudio()) {
+      TrackID audioTrackId = aInfo->mAudio.mTrackId;
+      out.mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO);
+    }
+    if (aInfo->HasVideo()) {
+      TrackID videoTrackId = aInfo->mVideo.mTrackId;
+      out.mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO);
+    }
+  }
+
   // If this element had a video track, but consists only of an audio track now,
   // delete the VideoFrameContainer. This happens when the src is changed to an
   // audio only file.
@@ -3580,8 +3582,6 @@ HTMLMediaElement::UpdateReadyStateInternal()
 {
   if (!mDecoder && !mSrcStream) {
     // Not initialized - bail out.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Not initialized", this));
     return;
   }
@@ -3589,8 +3589,6 @@ HTMLMediaElement::UpdateReadyStateInternal()
     // aNextFrame might have a next frame because the decoder can advance
     // on its own thread before MetadataLoaded gets a chance to run.
     // The arrival of more data can't change us out of this readyState.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Decoder ready state < HAVE_METADATA", this));
     return;
   }
@@ -3598,23 +3596,11 @@ HTMLMediaElement::UpdateReadyStateInternal()
     bool hasAudio = !AudioTracks()->IsEmpty();
     bool hasVideo = !VideoTracks()->IsEmpty();

-    if (!hasAudio && !hasVideo) {
-      LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                            "Stream with no tracks", this));
+    if ((!hasAudio && !hasVideo) ||
+        (IsVideo() && hasVideo && !HasVideo())) {
       return;
     }

-    if (IsVideo() && hasVideo && !HasVideo()) {
-      LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                            "Stream waiting for video", this));
-      return;
-    }
-
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() Stream has "
-                          "metadata; audioTracks=%d, videoTracks=%d, "
-                          "hasVideoFrame=%d", this, AudioTracks()->Length(),
-                          VideoTracks()->Length(), HasVideo()));
     // We are playing a stream that has video and a video frame is now set.
     // This means we have all metadata needed to change ready state.
     MediaInfo mediaInfo = mMediaInfo;
@@ -3628,8 +3614,6 @@ HTMLMediaElement::UpdateReadyStateInternal()
   }

   if (NextFrameStatus() == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING) {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "NEXT_FRAME_UNAVAILABLE_SEEKING; Forcing HAVE_METADATA", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
     return;
   }
@@ -3640,8 +3624,6 @@ HTMLMediaElement::UpdateReadyStateInternal()
     // Also, if video became available after advancing to HAVE_CURRENT_DATA
     // while we are still playing, we need to revert to HAVE_METADATA until
     // a video frame is available.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Playing video but no video frame; Forcing HAVE_METADATA", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
     return;
   }
@@ -3656,15 +3638,11 @@ HTMLMediaElement::UpdateReadyStateInternal()
     // should remain at HAVE_CURRENT_DATA in this case.
     // Note that this state transition includes the case where we finished
     // downloaded the whole data stream.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Decoder download suspended by cache", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
     return;
   }

   if (NextFrameStatus() != MediaDecoderOwner::NEXT_FRAME_AVAILABLE) {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Next frame not available", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
     if (!mWaitingFired && NextFrameStatus() == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING) {
       FireTimeUpdate(false);
@@ -3675,8 +3653,6 @@ HTMLMediaElement::UpdateReadyStateInternal()
   }

   if (mSrcStream) {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Stream HAVE_ENOUGH_DATA", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
     return;
   }
@@ -3691,13 +3667,9 @@ HTMLMediaElement::UpdateReadyStateInternal()
   // without stopping to buffer.
   if (mDecoder->CanPlayThrough())
   {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Decoder can play through", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
     return;
   }
-  LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                        "Default; Decoder has future data", this));
   ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA);
 }
@@ -4815,11 +4787,11 @@ NS_IMETHODIMP HTMLMediaElement::WindowAudioCaptureChanged()
       MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
                                     AudioChannel::Normal);

-    if (GetSrcMediaStream()) {
-      mCaptureStreamPort = msg->ConnectToCaptureStream(id, GetSrcMediaStream());
+    if (mSrcStream) {
+      mCaptureStreamPort = msg->ConnectToCaptureStream(id, mSrcStream->GetStream());
     } else {
       nsRefPtr<DOMMediaStream> stream = CaptureStreamInternal(false, msg);
-      mCaptureStreamPort = msg->ConnectToCaptureStream(id, stream->GetPlaybackStream());
+      mCaptureStreamPort = msg->ConnectToCaptureStream(id, stream->GetStream());
     }
   } else {
     mAudioCapturedByWindow = false;
@@ -4829,7 +4801,7 @@ NS_IMETHODIMP HTMLMediaElement::WindowAudioCaptureChanged()
     MOZ_ASSERT(ps);

     for (uint32_t i = 0; i < mOutputStreams.Length(); i++) {
-      if (mOutputStreams[i].mStream->GetPlaybackStream() == ps) {
+      if (mOutputStreams[i].mStream->GetStream() == ps) {
         mOutputStreams.RemoveElementAt(i);
         break;
       }

View file

@@ -349,19 +349,12 @@ public:
   /**
    * This will return null if mSrcStream is null, or if mSrcStream is not
-   * null but its GetPlaybackStream() returns null --- which can happen during
+   * null but its GetStream() returns null --- which can happen during
    * cycle collection unlinking!
    */
   MediaStream* GetSrcMediaStream() const
   {
-    if (!mSrcStream) {
-      return nullptr;
-    }
-    if (mSrcStream->GetCameraStream()) {
-      // XXX Remove this check with CameraPreviewMediaStream per bug 1124630.
-      return mSrcStream->GetCameraStream();
-    }
-    return mSrcStream->GetPlaybackStream();
+    return mSrcStream ? mSrcStream->GetStream() : nullptr;
   }

   // WebIDL
@@ -1100,6 +1093,11 @@ protected:
   // Holds a reference to the stream connecting this stream to the capture sink.
   nsRefPtr<MediaInputPort> mCaptureStreamPort;
+  // Holds a reference to a stream with mSrcStream as input but intended for
+  // playback. Used so we don't block playback of other video elements
+  // playing the same mSrcStream.
+  nsRefPtr<DOMMediaStream> mPlaybackStream;
+
   // Holds references to the DOM wrappers for the MediaStreams that we're
   // writing to.
   struct OutputMediaStream {
@@ -1108,8 +1106,8 @@ protected:
   };
   nsTArray<OutputMediaStream> mOutputStreams;

-  // Holds a reference to the MediaStreamListener attached to mSrcStream's
-  // playback stream.
+  // Holds a reference to the MediaStreamListener attached to mPlaybackStream
+  // (or mSrcStream if mPlaybackStream is null).
   nsRefPtr<StreamListener> mMediaStreamListener;
   // Holds a reference to the size-getting MediaStreamListener attached to
   // mSrcStream.

View file

@@ -29,8 +29,8 @@ namespace mozilla
 // We are mixing to mono until PeerConnection can accept stereo
 static const uint32_t MONO = 1;

-AudioCaptureStream::AudioCaptureStream(DOMMediaStream* aWrapper, TrackID aTrackId)
-  : ProcessedMediaStream(aWrapper), mTrackId(aTrackId), mTrackCreated(false)
+AudioCaptureStream::AudioCaptureStream(DOMMediaStream* aWrapper)
+  : ProcessedMediaStream(aWrapper), mTrackCreated(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_COUNT_CTOR(AudioCaptureStream);
@@ -48,14 +48,14 @@ AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
                                  uint32_t aFlags)
 {
   uint32_t inputCount = mInputs.Length();
-  StreamBuffer::Track* track = EnsureTrack(mTrackId);
+  StreamBuffer::Track* track = EnsureTrack(AUDIO_TRACK);
   // Notify the DOM everything is in order.
   if (!mTrackCreated) {
     for (uint32_t i = 0; i < mListeners.Length(); i++) {
       MediaStreamListener* l = mListeners[i];
       AudioSegment tmp;
       l->NotifyQueuedTrackChanges(
-        Graph(), mTrackId, 0, MediaStreamListener::TRACK_EVENT_CREATED, tmp);
+        Graph(), AUDIO_TRACK, 0, MediaStreamListener::TRACK_EVENT_CREATED, tmp);
       l->NotifyFinishedTrackCreation(Graph());
     }
     mTrackCreated = true;
@@ -127,6 +127,6 @@ AudioCaptureStream::MixerCallback(AudioDataValue* aMixedBuffer,
   }

   // Now we have mixed data, simply append it to out track.
-  EnsureTrack(mTrackId)->Get<AudioSegment>()->AppendAndConsumeChunk(&chunk);
+  EnsureTrack(AUDIO_TRACK)->Get<AudioSegment>()->AppendAndConsumeChunk(&chunk);
 }
 }

View file

@@ -8,7 +8,6 @@
 #include "MediaStreamGraph.h"
 #include "AudioMixer.h"
-#include "StreamBuffer.h"
 #include <algorithm>

 namespace mozilla
@@ -23,17 +22,17 @@ class AudioCaptureStream : public ProcessedMediaStream,
                            public MixerCallbackReceiver
 {
 public:
-  explicit AudioCaptureStream(DOMMediaStream* aWrapper, TrackID aTrackId);
+  explicit AudioCaptureStream(DOMMediaStream* aWrapper);
   virtual ~AudioCaptureStream();

   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;

 protected:
+  enum { AUDIO_TRACK = 1 };
   void MixerCallback(AudioDataValue* aMixedBuffer, AudioSampleFormat aFormat,
                      uint32_t aChannels, uint32_t aFrames,
                      uint32_t aSampleRate) override;
   AudioMixer mMixer;
-  TrackID mTrackId;
   bool mTrackCreated;
 };
 }

View file

@@ -241,14 +241,14 @@ CanvasCaptureMediaStream::Init(const dom::Optional<double>& aFPS,
 {
   if (!aFPS.WasPassed()) {
     mOutputStreamDriver =
-      new AutoDriver(GetInputStream()->AsSourceStream(), aTrackId);
+      new AutoDriver(GetStream()->AsSourceStream(), aTrackId);
   } else if (aFPS.Value() < 0) {
     return NS_ERROR_ILLEGAL_VALUE;
   } else {
     // Cap frame rate to 60 FPS for sanity
     double fps = std::min(60.0, aFPS.Value());
     mOutputStreamDriver =
-      new TimerDriver(GetInputStream()->AsSourceStream(), fps, aTrackId);
+      new TimerDriver(GetStream()->AsSourceStream(), fps, aTrackId);
   }
   return NS_OK;
 }

View file

@ -21,272 +21,115 @@
#include "VideoStreamTrack.h" #include "VideoStreamTrack.h"
#include "Layers.h" #include "Layers.h"
#ifdef LOG
#undef LOG
#endif
static PRLogModuleInfo* gMediaStreamLog;
#define LOG(type, msg) MOZ_LOG(gMediaStreamLog, type, msg)
using namespace mozilla; using namespace mozilla;
using namespace mozilla::dom; using namespace mozilla::dom;
using namespace mozilla::layers; using namespace mozilla::layers;
const TrackID TRACK_VIDEO_PRIMARY = 1; const TrackID TRACK_VIDEO_PRIMARY = 1;
/** class DOMMediaStream::StreamListener : public MediaStreamListener {
* TrackPort is a representation of a MediaStreamTrack-MediaInputPort pair
* that make up a link between the Owned stream and the Playback stream.
*
* Semantically, the track is the identifier/key and the port the value of this
* connection.
*
* The input port can be shared between several TrackPorts. This is the case
* for DOMMediaStream's mPlaybackPort which forwards all tracks in its
* mOwnedStream automatically.
*
* If the MediaStreamTrack is owned by another DOMMediaStream (called A) than
* the one owning the TrackPort (called B), the input port (locked to the
* MediaStreamTrack's TrackID) connects A's mOwnedStream to B's mPlaybackStream.
*
* A TrackPort may never leave the DOMMediaStream it was created in. Internal
* use only.
*/
class DOMMediaStream::TrackPort
{
public: public:
NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(TrackPort) explicit StreamListener(DOMMediaStream* aStream)
NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(TrackPort) : mStream(aStream)
{}
enum class InputPortOwnership { // Main thread only
OWNED = 1, void Forget() { mStream = nullptr; }
EXTERNAL DOMMediaStream* GetStream() { return mStream; }
class TrackChange : public nsRunnable {
public:
TrackChange(StreamListener* aListener,
TrackID aID, StreamTime aTrackOffset,
uint32_t aEvents, MediaSegment::Type aType)
: mListener(aListener), mID(aID), mEvents(aEvents), mType(aType)
{
}
NS_IMETHOD Run()
{
NS_ASSERTION(NS_IsMainThread(), "main thread only");
DOMMediaStream* stream = mListener->GetStream();
if (!stream) {
return NS_OK;
}
nsRefPtr<MediaStreamTrack> track;
if (mEvents & MediaStreamListener::TRACK_EVENT_CREATED) {
track = stream->BindDOMTrack(mID, mType);
if (!track) {
stream->CreateDOMTrack(mID, mType);
track = stream->BindDOMTrack(mID, mType);
}
stream->NotifyMediaStreamTrackCreated(track);
} else {
track = stream->GetDOMTrackFor(mID);
}
if (mEvents & MediaStreamListener::TRACK_EVENT_ENDED) {
if (track) {
track->NotifyEnded();
stream->NotifyMediaStreamTrackEnded(track);
} else {
NS_ERROR("track ended but not found");
}
}
return NS_OK;
}
StreamTime mEndTime;
nsRefPtr<StreamListener> mListener;
TrackID mID;
uint32_t mEvents;
MediaSegment::Type mType;
}; };
TrackPort(MediaInputPort* aInputPort,
MediaStreamTrack* aTrack,
const InputPortOwnership aOwnership)
: mInputPort(aInputPort)
, mTrack(aTrack)
, mOwnership(aOwnership)
{
MOZ_ASSERT(mInputPort);
MOZ_ASSERT(mTrack);
MOZ_COUNT_CTOR(TrackPort);
}
protected:
virtual ~TrackPort()
{
MOZ_COUNT_DTOR(TrackPort);
if (mOwnership == InputPortOwnership::OWNED && mInputPort) {
mInputPort->Destroy();
mInputPort = nullptr;
}
}
public:
void DestroyInputPort()
{
if (mInputPort) {
mInputPort->Destroy();
mInputPort = nullptr;
}
}
/** /**
* Returns the source stream of the input port. * Notify that changes to one of the stream tracks have been queued.
* aTrackEvents can be any combination of TRACK_EVENT_CREATED and
* TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
* at aTrackOffset (relative to the start of the stream).
* aQueuedMedia can be null if there is no output.
*/ */
MediaStream* GetSource() const { return mInputPort ? mInputPort->GetSource() virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
: nullptr; } StreamTime aTrackOffset,
uint32_t aTrackEvents,
/** const MediaSegment& aQueuedMedia) override
* Returns the track ID this track is locked to in the source stream of the
* input port.
*/
TrackID GetSourceTrackId() const { return mInputPort ? mInputPort->GetSourceTrackId()
: TRACK_INVALID; }
MediaInputPort* GetInputPort() const { return mInputPort; }
MediaStreamTrack* GetTrack() const { return mTrack; }
private:
nsRefPtr<MediaInputPort> mInputPort;
nsRefPtr<MediaStreamTrack> mTrack;
// Defines if we've been given ownership of the input port or if it's owned
// externally. The owner is responsible for destroying the port.
const InputPortOwnership mOwnership;
};
NS_IMPL_CYCLE_COLLECTION(DOMMediaStream::TrackPort, mTrack)
NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(DOMMediaStream::TrackPort, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(DOMMediaStream::TrackPort, Release)
/**
* Listener registered on the Owned stream to detect added and ended owned
* tracks for keeping the list of MediaStreamTracks in sync with the tracks
* added and ended directly at the source.
*/
class DOMMediaStream::OwnedStreamListener : public MediaStreamListener {
public:
explicit OwnedStreamListener(DOMMediaStream* aStream)
: mStream(aStream)
{}
void Forget() { mStream = nullptr; }
void DoNotifyTrackCreated(TrackID aTrackId, MediaSegment::Type aType)
{ {
MOZ_ASSERT(NS_IsMainThread()); if (aTrackEvents & (TRACK_EVENT_CREATED | TRACK_EVENT_ENDED)) {
nsRefPtr<TrackChange> runnable =
if (!mStream) { new TrackChange(this, aID, aTrackOffset, aTrackEvents,
return; aQueuedMedia.GetType());
}
MediaStreamTrack* track = mStream->FindOwnedDOMTrack(
mStream->GetOwnedStream(), aTrackId);
if (track) {
// This track has already been manually created. Abort.
return;
}
NS_WARN_IF_FALSE(!mStream->mTracks.IsEmpty(),
"A new track was detected on the input stream; creating a corresponding MediaStreamTrack. "
"Initial tracks should be added manually to immediately and synchronously be available to JS.");
mStream->CreateOwnDOMTrack(aTrackId, aType);
}
void DoNotifyTrackEnded(TrackID aTrackId)
{
MOZ_ASSERT(NS_IsMainThread());
if (!mStream) {
return;
}
nsRefPtr<MediaStreamTrack> track =
mStream->FindOwnedDOMTrack(mStream->GetOwnedStream(), aTrackId);
NS_ASSERTION(track, "Owned MediaStreamTracks must be known by the DOMMediaStream");
if (track) {
LOG(LogLevel::Debug, ("DOMMediaStream %p MediaStreamTrack %p ended at the source. Marking it ended.",
mStream, track.get()));
track->NotifyEnded();
}
}
void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia,
MediaStream* aInputStream,
TrackID aInputTrackID) override
{
if (aTrackEvents & TRACK_EVENT_CREATED) {
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableMethodWithArgs<TrackID, MediaSegment::Type>(
this, &OwnedStreamListener::DoNotifyTrackCreated,
aID, aQueuedMedia.GetType());
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
} else if (aTrackEvents & TRACK_EVENT_ENDED) {
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableMethodWithArgs<TrackID>(
this, &OwnedStreamListener::DoNotifyTrackEnded, aID);
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget()); aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
} }
} }
private: class TracksCreatedRunnable : public nsRunnable {
// These fields may only be accessed on the main thread public:
DOMMediaStream* mStream; explicit TracksCreatedRunnable(StreamListener* aListener)
}; : mListener(aListener)
{
/**
* Listener registered on the Playback stream to detect when tracks end and when
* all new tracks this iteration have been created - for when several tracks are
* queued by the source and committed all at once.
*/
class DOMMediaStream::PlaybackStreamListener : public MediaStreamListener {
public:
explicit PlaybackStreamListener(DOMMediaStream* aStream)
: mStream(aStream)
{}
void Forget()
{
MOZ_ASSERT(NS_IsMainThread());
mStream = nullptr;
}
void DoNotifyTrackEnded(MediaStream* aInputStream,
TrackID aInputTrackID)
{
MOZ_ASSERT(NS_IsMainThread());
if (!mStream) {
return;
} }
LOG(LogLevel::Debug, ("DOMMediaStream %p Track %u of stream %p ended", NS_IMETHOD Run()
mStream, aInputTrackID, aInputStream)); {
MOZ_ASSERT(NS_IsMainThread());
nsRefPtr<MediaStreamTrack> track = DOMMediaStream* stream = mListener->GetStream();
mStream->FindPlaybackDOMTrack(aInputStream, aInputTrackID); if (!stream) {
if (!track) { return NS_OK;
LOG(LogLevel::Debug, ("DOMMediaStream %p Not a playback track.", mStream)); }
return;
stream->TracksCreated();
return NS_OK;
} }
LOG(LogLevel::Debug, ("DOMMediaStream %p Playback track; notifying stream listeners.", nsRefPtr<StreamListener> mListener;
mStream)); };
mStream->NotifyTrackRemoved(track);
nsRefPtr<TrackPort> endedPort = mStream->FindPlaybackTrackPort(*track); virtual void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) override
NS_ASSERTION(endedPort, "Playback track should have a TrackPort");
if (endedPort &&
endedPort->GetSourceTrackId() != TRACK_ANY &&
endedPort->GetSourceTrackId() != TRACK_INVALID &&
endedPort->GetSourceTrackId() != TRACK_NONE) {
// If a track connected to a locked-track input port ends, we destroy the
// port to allow our playback stream to finish.
// XXX (bug 1208316) This should not be necessary when MediaStreams don't
// finish but instead become inactive.
endedPort->DestroyInputPort();
}
}
void DoNotifyFinishedTrackCreation()
{ {
MOZ_ASSERT(NS_IsMainThread()); nsRefPtr<TracksCreatedRunnable> runnable = new TracksCreatedRunnable(this);
if (!mStream) {
return;
}
mStream->NotifyTracksCreated();
}
// The methods below are called on the MediaStreamGraph thread.
void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia,
MediaStream* aInputStream,
TrackID aInputTrackID) override
{
if (aTrackEvents & TRACK_EVENT_ENDED) {
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableMethodWithArgs<StorensRefPtrPassByPtr<MediaStream>, TrackID>(
this, &PlaybackStreamListener::DoNotifyTrackEnded, aInputStream, aInputTrackID);
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
}
}
void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) override
{
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableMethod(this, &PlaybackStreamListener::DoNotifyFinishedTrackCreation);
aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget()); aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
} }
@ -301,7 +144,6 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(DOMMediaStream,
DOMEventTargetHelper) DOMEventTargetHelper)
tmp->Destroy(); tmp->Destroy();
NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow) NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwnedTracks)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks) NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks)
NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive) NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive)
NS_IMPL_CYCLE_COLLECTION_UNLINK_END NS_IMPL_CYCLE_COLLECTION_UNLINK_END
@ -309,7 +151,6 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(DOMMediaStream, NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(DOMMediaStream,
DOMEventTargetHelper) DOMEventTargetHelper)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow) NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwnedTracks)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks) NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive) NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
@ -338,19 +179,14 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(DOMAudioNodeMediaStream)
NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream) NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
DOMMediaStream::DOMMediaStream() DOMMediaStream::DOMMediaStream()
: mLogicalStreamStartTime(0), mInputStream(nullptr), mOwnedStream(nullptr), : mLogicalStreamStartTime(0),
mPlaybackStream(nullptr), mOwnedPort(nullptr), mPlaybackPort(nullptr), mStream(nullptr), mTracksCreated(false),
mTracksCreated(false), mNotifiedOfMediaStreamGraphShutdown(false), mNotifiedOfMediaStreamGraphShutdown(false), mCORSMode(CORS_NONE)
mCORSMode(CORS_NONE)
{ {
nsresult rv; nsresult rv;
nsCOMPtr<nsIUUIDGenerator> uuidgen = nsCOMPtr<nsIUUIDGenerator> uuidgen =
do_GetService("@mozilla.org/uuid-generator;1", &rv); do_GetService("@mozilla.org/uuid-generator;1", &rv);
if (!gMediaStreamLog) {
gMediaStreamLog = PR_NewLogModule("MediaStream");
}
if (NS_SUCCEEDED(rv) && uuidgen) { if (NS_SUCCEEDED(rv) && uuidgen) {
nsID uuid; nsID uuid;
memset(&uuid, 0, sizeof(uuid)); memset(&uuid, 0, sizeof(uuid));
@ -371,34 +207,13 @@ DOMMediaStream::~DOMMediaStream()
void void
DOMMediaStream::Destroy() DOMMediaStream::Destroy()
{ {
LOG(LogLevel::Debug, ("DOMMediaStream %p Being destroyed.", this)); if (mListener) {
if (mOwnedListener) { mListener->Forget();
mOwnedListener->Forget(); mListener = nullptr;
mOwnedListener = nullptr;
} }
if (mPlaybackListener) { if (mStream) {
mPlaybackListener->Forget(); mStream->Destroy();
mPlaybackListener = nullptr; mStream = nullptr;
}
if (mPlaybackPort) {
mPlaybackPort->Destroy();
mPlaybackPort = nullptr;
}
if (mOwnedPort) {
mOwnedPort->Destroy();
mOwnedPort = nullptr;
}
if (mPlaybackStream) {
mPlaybackStream->Destroy();
mPlaybackStream = nullptr;
}
if (mOwnedStream) {
mOwnedStream->Destroy();
mOwnedStream = nullptr;
}
if (mInputStream) {
mInputStream->Destroy();
mInputStream = nullptr;
} }
} }
@ -411,11 +226,11 @@ DOMMediaStream::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
double double
DOMMediaStream::CurrentTime() DOMMediaStream::CurrentTime()
{ {
if (!mPlaybackStream) { if (!mStream) {
return 0.0; return 0.0;
} }
return mPlaybackStream-> return mStream->
StreamTimeToSeconds(mPlaybackStream->GetCurrentTime() - mLogicalStreamStartTime); StreamTimeToSeconds(mStream->GetCurrentTime() - mLogicalStreamStartTime);
} }
void void
@ -427,8 +242,8 @@ DOMMediaStream::GetId(nsAString& aID) const
void void
DOMMediaStream::GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks) DOMMediaStream::GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks)
{ {
for (const nsRefPtr<TrackPort>& info : mTracks) { for (uint32_t i = 0; i < mTracks.Length(); ++i) {
AudioStreamTrack* t = info->GetTrack()->AsAudioStreamTrack(); AudioStreamTrack* t = mTracks[i]->AsAudioStreamTrack();
if (t) { if (t) {
aTracks.AppendElement(t); aTracks.AppendElement(t);
} }
@ -438,8 +253,8 @@ DOMMediaStream::GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks)
void void
DOMMediaStream::GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks) DOMMediaStream::GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks)
{ {
for (const nsRefPtr<TrackPort>& info : mTracks) { for (uint32_t i = 0; i < mTracks.Length(); ++i) {
VideoStreamTrack* t = info->GetTrack()->AsVideoStreamTrack(); VideoStreamTrack* t = mTracks[i]->AsVideoStreamTrack();
if (t) { if (t) {
aTracks.AppendElement(t); aTracks.AppendElement(t);
} }
@ -449,27 +264,19 @@ DOMMediaStream::GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks)
void void
DOMMediaStream::GetTracks(nsTArray<nsRefPtr<MediaStreamTrack> >& aTracks) DOMMediaStream::GetTracks(nsTArray<nsRefPtr<MediaStreamTrack> >& aTracks)
{ {
for (const nsRefPtr<TrackPort>& info : mTracks) { aTracks.AppendElements(mTracks);
aTracks.AppendElement(info->GetTrack());
}
} }
bool bool
DOMMediaStream::HasTrack(const MediaStreamTrack& aTrack) const DOMMediaStream::HasTrack(const MediaStreamTrack& aTrack) const
{ {
return !!FindPlaybackDOMTrack(aTrack.GetStream()->GetOwnedStream(), aTrack.GetTrackID()); return mTracks.Contains(&aTrack);
}
bool
DOMMediaStream::OwnsTrack(const MediaStreamTrack& aTrack) const
{
return (aTrack.GetStream() == this) && HasTrack(aTrack);
} }
bool bool
DOMMediaStream::IsFinished() DOMMediaStream::IsFinished()
{ {
return !mPlaybackStream || mPlaybackStream->IsFinished(); return !mStream || mStream->IsFinished();
} }
void void
@ -477,7 +284,7 @@ DOMMediaStream::InitSourceStream(nsIDOMWindow* aWindow,
MediaStreamGraph* aGraph) MediaStreamGraph* aGraph)
{ {
mWindow = aWindow; mWindow = aWindow;
InitStreamCommon(aGraph->CreateSourceStream(nullptr), aGraph); InitStreamCommon(aGraph->CreateSourceStream(this));
} }
void void
@ -485,7 +292,8 @@ DOMMediaStream::InitTrackUnionStream(nsIDOMWindow* aWindow,
MediaStreamGraph* aGraph) MediaStreamGraph* aGraph)
{ {
mWindow = aWindow; mWindow = aWindow;
InitStreamCommon(aGraph->CreateTrackUnionStream(nullptr), aGraph);
InitStreamCommon(aGraph->CreateTrackUnionStream(this));
} }
void void
@ -494,36 +302,17 @@ DOMMediaStream::InitAudioCaptureStream(nsIDOMWindow* aWindow,
{ {
mWindow = aWindow; mWindow = aWindow;
const TrackID AUDIO_TRACK = 1; InitStreamCommon(aGraph->CreateAudioCaptureStream(this));
InitStreamCommon(aGraph->CreateAudioCaptureStream(this, AUDIO_TRACK), aGraph);
CreateOwnDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO);
} }
void void
DOMMediaStream::InitStreamCommon(MediaStream* aStream, DOMMediaStream::InitStreamCommon(MediaStream* aStream)
MediaStreamGraph* aGraph)
{ {
mInputStream = aStream; mStream = aStream;
// We pass null as the wrapper since it is only used to signal finished // Setup track listener
// streams. This is only needed for the playback stream. mListener = new StreamListener(this);
mOwnedStream = aGraph->CreateTrackUnionStream(nullptr); aStream->AddListener(mListener);
mOwnedStream->SetAutofinish(true);
mOwnedPort = mOwnedStream->AllocateInputPort(mInputStream);
mPlaybackStream = aGraph->CreateTrackUnionStream(this);
mPlaybackStream->SetAutofinish(true);
mPlaybackPort = mPlaybackStream->AllocateInputPort(mOwnedStream);
LOG(LogLevel::Debug, ("DOMMediaStream %p Initiated with mInputStream=%p, mOwnedStream=%p, mPlaybackStream=%p",
this, mInputStream, mOwnedStream, mPlaybackStream));
// Setup track listeners
mOwnedListener = new OwnedStreamListener(this);
mOwnedStream->AddListener(mOwnedListener);
mPlaybackListener = new PlaybackStreamListener(this);
mPlaybackStream->AddListener(mPlaybackListener);
} }
already_AddRefed<DOMMediaStream> already_AddRefed<DOMMediaStream>
@ -556,16 +345,16 @@ DOMMediaStream::CreateAudioCaptureStream(nsIDOMWindow* aWindow,
void void
DOMMediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled) DOMMediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
{ {
if (mOwnedStream) { if (mStream) {
mOwnedStream->SetTrackEnabled(aTrackID, aEnabled); mStream->SetTrackEnabled(aTrackID, aEnabled);
} }
} }
void void
DOMMediaStream::StopTrack(TrackID aTrackID) DOMMediaStream::StopTrack(TrackID aTrackID)
{ {
if (mInputStream && mInputStream->AsSourceStream()) { if (mStream && mStream->AsSourceStream()) {
mInputStream->AsSourceStream()->EndTrack(aTrackID); mStream->AsSourceStream()->EndTrack(aTrackID);
} }
} }
@ -631,10 +420,8 @@ DOMMediaStream::RemovePrincipalChangeObserver(PrincipalChangeObserver* aObserver
} }
MediaStreamTrack* MediaStreamTrack*
DOMMediaStream::CreateOwnDOMTrack(TrackID aTrackID, MediaSegment::Type aType) DOMMediaStream::CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType)
{ {
MOZ_ASSERT(FindOwnedDOMTrack(GetOwnedStream(), aTrackID) == nullptr);
MediaStreamTrack* track; MediaStreamTrack* track;
switch (aType) { switch (aType) {
case MediaSegment::AUDIO: case MediaSegment::AUDIO:
@ -646,53 +433,52 @@ DOMMediaStream::CreateOwnDOMTrack(TrackID aTrackID, MediaSegment::Type aType)
default: default:
MOZ_CRASH("Unhandled track type"); MOZ_CRASH("Unhandled track type");
} }
mTracks.AppendElement(track);
LOG(LogLevel::Debug, ("DOMMediaStream %p Created new track %p with ID %u", this, track, aTrackID));
nsRefPtr<TrackPort> ownedTrackPort =
new TrackPort(mOwnedPort, track, TrackPort::InputPortOwnership::EXTERNAL);
mOwnedTracks.AppendElement(ownedTrackPort.forget());
nsRefPtr<TrackPort> playbackTrackPort =
new TrackPort(mPlaybackPort, track, TrackPort::InputPortOwnership::EXTERNAL);
mTracks.AppendElement(playbackTrackPort.forget());
NotifyMediaStreamTrackCreated(track);
return track; return track;
} }
MediaStreamTrack* MediaStreamTrack*
DOMMediaStream::FindOwnedDOMTrack(MediaStream* aOwningStream, TrackID aTrackID) const DOMMediaStream::BindDOMTrack(TrackID aTrackID, MediaSegment::Type aType)
{ {
if (aOwningStream != mOwnedStream) { MediaStreamTrack* track = nullptr;
return nullptr; bool bindSuccess = false;
} switch (aType) {
case MediaSegment::AUDIO: {
for (const nsRefPtr<TrackPort>& info : mOwnedTracks) { for (size_t i = 0; i < mTracks.Length(); ++i) {
if (info->GetTrack()->GetTrackID() == aTrackID) { track = mTracks[i]->AsAudioStreamTrack();
return info->GetTrack(); if (track && track->GetTrackID() == aTrackID) {
bindSuccess = true;
break;
}
} }
break;
} }
return nullptr; case MediaSegment::VIDEO: {
for (size_t i = 0; i < mTracks.Length(); ++i) {
track = mTracks[i]->AsVideoStreamTrack();
if (track && track->GetTrackID() == aTrackID) {
bindSuccess = true;
break;
}
}
break;
}
default:
MOZ_CRASH("Unhandled track type");
}
return bindSuccess ? track : nullptr;
} }
MediaStreamTrack* MediaStreamTrack*
DOMMediaStream::FindPlaybackDOMTrack(MediaStream* aInputStream, TrackID aInputTrackID) const DOMMediaStream::GetDOMTrackFor(TrackID aTrackID)
{ {
for (const nsRefPtr<TrackPort>& info : mTracks) { for (uint32_t i = 0; i < mTracks.Length(); ++i) {
if (info->GetInputPort() == mPlaybackPort && MediaStreamTrack* t = mTracks[i];
aInputStream == mOwnedStream && // We may add streams to our track list that are actually owned by
aInputTrackID == info->GetTrack()->GetTrackID()) { // a different DOMMediaStream. Ignore those.
// This track is in our owned and playback streams. if (t->GetTrackID() == aTrackID && t->GetStream() == this) {
return info->GetTrack(); return t;
}
if (info->GetInputPort()->GetSource() == aInputStream &&
info->GetSourceTrackId() == aInputTrackID) {
// This track is owned externally but in our playback stream.
MOZ_ASSERT(aInputTrackID != TRACK_NONE);
MOZ_ASSERT(aInputTrackID != TRACK_INVALID);
MOZ_ASSERT(aInputTrackID != TRACK_ANY);
return info->GetTrack();
} }
} }
return nullptr; return nullptr;
@ -731,6 +517,7 @@ DOMMediaStream::OnTracksAvailable(OnTracksAvailableCallback* aRunnable)
void void
DOMMediaStream::TracksCreated() DOMMediaStream::TracksCreated()
{ {
MOZ_ASSERT(!mTracks.IsEmpty());
mTracksCreated = true; mTracksCreated = true;
CheckTracksAvailable(); CheckTracksAvailable();
} }
@ -749,14 +536,6 @@ DOMMediaStream::CheckTracksAvailable()
} }
} }
void
DOMMediaStream::CreateAndAddPlaybackStreamListener(MediaStream* aStream)
{
MOZ_ASSERT(GetCameraStream(), "I'm a hack. Only DOMCameraControl may use me.");
mPlaybackListener = new PlaybackStreamListener(this);
aStream->AddListener(mPlaybackListener);
}
already_AddRefed<AudioTrack> already_AddRefed<AudioTrack>
DOMMediaStream::CreateAudioTrack(AudioStreamTrack* aStreamTrack) DOMMediaStream::CreateAudioTrack(AudioStreamTrack* aStreamTrack)
{ {
@ -792,11 +571,11 @@ DOMMediaStream::ConstructMediaTracks(AudioTrackList* aAudioTrackList,
mMediaTrackListListeners.AppendElement(videoListener); mMediaTrackListListeners.AppendElement(videoListener);
int firstEnabledVideo = -1; int firstEnabledVideo = -1;
for (const nsRefPtr<TrackPort>& info : mTracks) { for (uint32_t i = 0; i < mTracks.Length(); ++i) {
if (AudioStreamTrack* t = info->GetTrack()->AsAudioStreamTrack()) { if (AudioStreamTrack* t = mTracks[i]->AsAudioStreamTrack()) {
nsRefPtr<AudioTrack> track = CreateAudioTrack(t); nsRefPtr<AudioTrack> track = CreateAudioTrack(t);
aAudioTrackList->AddTrack(track); aAudioTrackList->AddTrack(track);
} else if (VideoStreamTrack* t = info->GetTrack()->AsVideoStreamTrack()) { } else if (VideoStreamTrack* t = mTracks[i]->AsVideoStreamTrack()) {
nsRefPtr<VideoTrack> track = CreateVideoTrack(t); nsRefPtr<VideoTrack> track = CreateVideoTrack(t);
aVideoTrackList->AddTrack(track); aVideoTrackList->AddTrack(track);
firstEnabledVideo = (t->Enabled() && firstEnabledVideo < 0) firstEnabledVideo = (t->Enabled() && firstEnabledVideo < 0)
@ -857,7 +636,7 @@ DOMMediaStream::NotifyMediaStreamTrackEnded(MediaStreamTrack* aTrack)
DOMLocalMediaStream::~DOMLocalMediaStream() DOMLocalMediaStream::~DOMLocalMediaStream()
{ {
if (mInputStream) { if (mStream) {
// Make sure Listeners of this stream know it's going away // Make sure Listeners of this stream know it's going away
Stop(); Stop();
} }
@ -872,8 +651,8 @@ DOMLocalMediaStream::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProt
void void
DOMLocalMediaStream::Stop() DOMLocalMediaStream::Stop()
{ {
if (mInputStream && mInputStream->AsSourceStream()) { if (mStream && mStream->AsSourceStream()) {
mInputStream->AsSourceStream()->EndAllTrackAndFinish(); mStream->AsSourceStream()->EndAllTrackAndFinish();
} }
} }
@ -948,7 +727,7 @@ DOMHwMediaStream::CreateHwStream(nsIDOMWindow* aWindow)
MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER, MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER,
AudioChannel::Normal); AudioChannel::Normal);
stream->InitSourceStream(aWindow, graph); stream->InitSourceStream(aWindow, graph);
stream->Init(stream->GetInputStream()); stream->Init(stream->GetStream());
return stream.forget(); return stream.forget();
} }
@ -1001,7 +780,7 @@ DOMHwMediaStream::SetImageSize(uint32_t width, uint32_t height)
mOverlayImage->SetData(imgData); mOverlayImage->SetData(imgData);
#endif #endif
SourceMediaStream* srcStream = GetInputStream()->AsSourceStream(); SourceMediaStream* srcStream = GetStream()->AsSourceStream();
StreamBuffer::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY); StreamBuffer::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
if (!track || !track->GetSegment()) { if (!track || !track->GetSegment()) {

View file

@ -35,9 +35,7 @@ class DOMHwMediaStream;
class DOMLocalMediaStream; class DOMLocalMediaStream;
class MediaStream; class MediaStream;
class MediaEngineSource; class MediaEngineSource;
class MediaInputPort;
class MediaStreamGraph; class MediaStreamGraph;
class ProcessedMediaStream;
namespace dom { namespace dom {
class AudioNode; class AudioNode;
@ -66,114 +64,9 @@ class MediaStreamDirectListener;
/** /**
* DOM wrapper for MediaStreams. * DOM wrapper for MediaStreams.
*
* To account for track operations such as clone(), addTrack() and
* removeTrack(), a DOMMediaStream wraps three internal (and chained)
* MediaStreams:
* 1. mInputStream
* - Controlled by the owner/source of the DOMMediaStream.
* It's a stream of the type indicated by
* - DOMMediaStream::CreateSourceStream/CreateTrackUnionStream. A source
* typically creates its DOMMediaStream, creates the MediaStreamTracks
* owned by said stream, then gets the internal input stream to which it
* feeds data for the previously created tracks.
* - When necessary it can create tracks on the internal stream only and
* their corresponding MediaStreamTracks will be asynchronously created.
* 2. mOwnedStream
* - A TrackUnionStream containing tracks owned by this stream.
* - The internal model of a MediaStreamTrack consists of its owning
* DOMMediaStream and the TrackID of the corresponding internal track in
* the owning DOMMediaStream's mOwnedStream.
* - The owned stream is different from the input stream since a cloned
* DOMMediaStream is also the owner of its (cloned) MediaStreamTracks.
* - Stopping an original track shall not stop its clone. This is
* solved by stopping it at the owned stream, while the clone's owned
* stream gets data directly from the original input stream.
* - A DOMMediaStream (original or clone) gets all tracks dynamically
* added by the source automatically forwarded by having a TRACK_ANY
* MediaInputPort set up from the owning DOMMediaStream's input stream
* to this DOMMediaStream's owned stream.
* 3. mPlaybackStream
* - A TrackUnionStream containing the tracks corresponding to the
* MediaStreamTracks currently in this DOMMediaStream (per getTracks()).
* - Similarly as for mOwnedStream, there's a TRACK_ANY MediaInputPort set
* up from the owned stream to the playback stream to allow tracks
* dynamically added by the source to be automatically forwarded to any
* audio or video sinks.
* - MediaStreamTracks added by addTrack() are set up with a MediaInputPort
* locked to their internal TrackID, from their owning DOMMediaStream's
* owned stream to this playback stream.
*
*
* A graphical representation of how tracks are connected in various cases as
* follows:
*
* addTrack()ed case:
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 ------------> t1 <- MediaStreamTrack X
* (pointing to t1 in A)
* --------> t2 <- MediaStreamTrack Y
* / (pointing to t1 in B)
* DOMStream B /
* Input Owned / Playback
* t1 ---------> t1 ------------> t1 <- MediaStreamTrack Y
* (pointing to t1 in B)
*
* removeTrack()ed case:
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 <- No tracks
*
*
* clone()d case:
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 ------------> t1 <- MediaStreamTrack X
* \ (pointing to t1 in A)
* -----
* DOMStream B \
* Input \ Owned Playback
* -> t1 ------------> t1 <- MediaStreamTrack Y
* (pointing to t1 in B)
*
*
* addTrack()ed, removeTrack()ed and clone()d case:
*
* Here we have done the following:
* var A = someStreamWithTwoTracks;
* var B = someStreamWithOneTrack;
* var X = A.getTracks()[0];
* var Y = A.getTracks()[1];
* var Z = B.getTracks()[0];
* A.addTrack(Z);
* A.removeTrack(X);
* B.removeTrack(Z);
* var A' = A.clone();
*
* DOMStream A
* Input Owned Playback
* t1 ---------> t1 <- MediaStreamTrack X (removed)
* (pointing to t1 in A)
* t2 ---------> t2 ------------> t2 <- MediaStreamTrack Y
* \ (pointing to t2 in A)
* \ ------> t3 <- MediaStreamTrack Z
* \ / (pointing to t1 in B)
* DOMStream B \ /
* Input \ Owned / Playback
* t1 ---^-----> t1 --- <- MediaStreamTrack Z (removed)
* \ \ (pointing to t1 in B)
* \ \
* DOMStream A' \ \
* Input \ \ Owned Playback
* \ -> t1 ------------> t1 <- MediaStreamTrack Y'
* \ (pointing to t1 in A')
* ----> t2 ------------> t2 <- MediaStreamTrack Z'
* (pointing to t2 in A')
*/ */
class DOMMediaStream : public DOMEventTargetHelper class DOMMediaStream : public DOMEventTargetHelper
{ {
class TrackPort;
friend class DOMLocalMediaStream; friend class DOMLocalMediaStream;
typedef dom::MediaStreamTrack MediaStreamTrack; typedef dom::MediaStreamTrack MediaStreamTrack;
typedef dom::AudioStreamTrack AudioStreamTrack; typedef dom::AudioStreamTrack AudioStreamTrack;
@ -210,38 +103,9 @@ public:
void GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks); void GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks);
void GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks); void GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks);
void GetTracks(nsTArray<nsRefPtr<MediaStreamTrack> >& aTracks); void GetTracks(nsTArray<nsRefPtr<MediaStreamTrack> >& aTracks);
// NON-WebIDL
/**
* Returns true if this DOMMediaStream has aTrack in its mPlaybackStream.
*/
bool HasTrack(const MediaStreamTrack& aTrack) const; bool HasTrack(const MediaStreamTrack& aTrack) const;
/** MediaStream* GetStream() const { return mStream; }
* Returns true if this DOMMediaStream owns aTrack.
*/
bool OwnsTrack(const MediaStreamTrack& aTrack) const;
/**
* Returns the corresponding MediaStreamTrack if it's in our mOwnedStream.
*/
MediaStreamTrack* FindOwnedDOMTrack(MediaStream* aOwningStream, TrackID aTrackID) const;
/**
* Returns the corresponding MediaStreamTrack if it's in our mPlaybackStream.
*/
MediaStreamTrack* FindPlaybackDOMTrack(MediaStream* aOwningStream, TrackID aTrackID) const;
MediaStream* GetInputStream() const { return mInputStream; }
ProcessedMediaStream* GetOwnedStream() const { return mOwnedStream; }
ProcessedMediaStream* GetPlaybackStream() const { return mPlaybackStream; }
/**
* Allows a video element to identify this stream as a camera stream, which
* needs special treatment.
*/
virtual MediaStream* GetCameraStream() const { return nullptr; }
/** /**
* Overridden in DOMLocalMediaStreams to allow getUserMedia to pass * Overridden in DOMLocalMediaStreams to allow getUserMedia to pass
@ -353,13 +217,11 @@ public:
mLogicalStreamStartTime = aTime; mLogicalStreamStartTime = aTime;
} }
/** // Notifications from StreamListener.
* Called for each track in our owned stream to indicate to JS that we // BindDOMTrack should only be called when it's safe to run script.
* are carrying that track. MediaStreamTrack* BindDOMTrack(TrackID aTrackID, MediaSegment::Type aType);
* MediaStreamTrack* CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType);
* Creates a MediaStreamTrack, adds it to mTracks and returns it. MediaStreamTrack* GetDOMTrackFor(TrackID aTrackID);
*/
MediaStreamTrack* CreateOwnDOMTrack(TrackID aTrackID, MediaSegment::Type aType);
class OnTracksAvailableCallback { class OnTracksAvailableCallback {
public: public:
@ -409,10 +271,13 @@ protected:
virtual ~DOMMediaStream(); virtual ~DOMMediaStream();
void Destroy(); void Destroy();
void InitSourceStream(nsIDOMWindow* aWindow, MediaStreamGraph* aGraph); void InitSourceStream(nsIDOMWindow* aWindow,
void InitTrackUnionStream(nsIDOMWindow* aWindow, MediaStreamGraph* aGraph); MediaStreamGraph* aGraph);
void InitAudioCaptureStream(nsIDOMWindow* aWindow, MediaStreamGraph* aGraph); void InitTrackUnionStream(nsIDOMWindow* aWindow,
void InitStreamCommon(MediaStream* aStream, MediaStreamGraph* aGraph); MediaStreamGraph* aGraph);
void InitAudioCaptureStream(nsIDOMWindow* aWindow,
MediaStreamGraph* aGraph);
void InitStreamCommon(MediaStream* aStream);
already_AddRefed<AudioTrack> CreateAudioTrack(AudioStreamTrack* aStreamTrack); already_AddRefed<AudioTrack> CreateAudioTrack(AudioStreamTrack* aStreamTrack);
already_AddRefed<VideoTrack> CreateVideoTrack(VideoStreamTrack* aStreamTrack); already_AddRefed<VideoTrack> CreateVideoTrack(VideoStreamTrack* aStreamTrack);
@ -422,14 +287,8 @@ protected:
void CheckTracksAvailable(); void CheckTracksAvailable();
class OwnedStreamListener; class StreamListener;
friend class OwnedStreamListener; friend class StreamListener;
class PlaybackStreamListener;
friend class PlaybackStreamListener;
// XXX Bug 1124630. Remove with CameraPreviewMediaStream.
void CreateAndAddPlaybackStreamListener(MediaStream*);
// StreamTime at which the currentTime attribute would return 0. // StreamTime at which the currentTime attribute would return 0.
StreamTime mLogicalStreamStartTime; StreamTime mLogicalStreamStartTime;
@ -437,41 +296,16 @@ protected:
// We need this to track our parent object. // We need this to track our parent object.
nsCOMPtr<nsIDOMWindow> mWindow; nsCOMPtr<nsIDOMWindow> mWindow;
// MediaStreams are owned by the graph, but we tell them when to die, // MediaStream is owned by the graph, but we tell it when to die, and it won't
// and they won't die until we let them. // die until we let it.
MediaStream* mStream;
// This stream contains tracks used as input by us. Cloning happens from this nsAutoTArray<nsRefPtr<MediaStreamTrack>,2> mTracks;
// stream. Tracks may exist in this stream but not in |mOwnedStream| if they nsRefPtr<StreamListener> mListener;
// have been stopped.
MediaStream* mInputStream;
// This stream contains tracks owned by us (if we were created directly from
// source, or cloned from some other stream). Tracks map to |mOwnedTracks|.
ProcessedMediaStream* mOwnedStream;
// This stream contains tracks currently played by us, regardless of owner.
// Tracks map to |mTracks|.
ProcessedMediaStream* mPlaybackStream;
// This port connects mInputStream to mOwnedStream. All tracks forwarded.
nsRefPtr<MediaInputPort> mOwnedPort;
// This port connects mOwnedStream to mPlaybackStream. All tracks not
// explicitly blocked due to removal are forwarded.
nsRefPtr<MediaInputPort> mPlaybackPort;
// MediaStreamTracks corresponding to tracks in our mOwnedStream.
nsAutoTArray<nsRefPtr<TrackPort>, 2> mOwnedTracks;
// MediaStreamTracks corresponding to tracks in our mPlaybackStream.
nsAutoTArray<nsRefPtr<TrackPort>, 2> mTracks;
nsRefPtr<OwnedStreamListener> mOwnedListener;
nsRefPtr<PlaybackStreamListener> mPlaybackListener;
nsTArray<nsAutoPtr<OnTracksAvailableCallback> > mRunOnTracksAvailable; nsTArray<nsAutoPtr<OnTracksAvailableCallback> > mRunOnTracksAvailable;
// Set to true after MediaStreamGraph has created tracks for mPlaybackStream. // Set to true after MediaStreamGraph has created tracks for mStream.
bool mTracksCreated; bool mTracksCreated;
nsString mID; nsString mID;

View file

@ -637,7 +637,7 @@ class nsDOMUserMediaStream : public DOMLocalMediaStream
{ {
public: public:
static already_AddRefed<nsDOMUserMediaStream> static already_AddRefed<nsDOMUserMediaStream>
CreateSourceStream(nsIDOMWindow* aWindow, CreateTrackUnionStream(nsIDOMWindow* aWindow,
GetUserMediaCallbackMediaStreamListener* aListener, GetUserMediaCallbackMediaStreamListener* aListener,
AudioDevice* aAudioDevice, AudioDevice* aAudioDevice,
VideoDevice* aVideoDevice, VideoDevice* aVideoDevice,
@ -646,7 +646,7 @@ public:
nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aListener, nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aListener,
aAudioDevice, aAudioDevice,
aVideoDevice); aVideoDevice);
stream->InitSourceStream(aWindow, aMSG); stream->InitTrackUnionStream(aWindow, aMSG);
return stream.forget(); return stream.forget();
} }
@ -675,15 +675,18 @@ public:
{ {
Stop(); Stop();
if (GetSourceStream()) { if (mPort) {
GetSourceStream()->Destroy(); mPort->Destroy();
}
if (mSourceStream) {
mSourceStream->Destroy();
} }
} }
virtual void Stop() override virtual void Stop() override
{ {
if (GetSourceStream()) { if (mSourceStream) {
GetSourceStream()->EndAllTrackAndFinish(); mSourceStream->EndAllTrackAndFinish();
} }
} }
@ -693,14 +696,14 @@ public:
// XXX This will not handle more complex cases well. // XXX This will not handle more complex cases well.
virtual void StopTrack(TrackID aTrackID) override virtual void StopTrack(TrackID aTrackID) override
{ {
if (GetSourceStream()) { if (mSourceStream) {
GetSourceStream()->EndTrack(aTrackID); mSourceStream->EndTrack(aTrackID);
// We could override NotifyMediaStreamTrackEnded(), and maybe should, but it's // We could override NotifyMediaStreamTrackEnded(), and maybe should, but it's
// risky to do late in a release since that will affect all track ends, and not // risky to do late in a release since that will affect all track ends, and not
// just StopTrack()s. // just StopTrack()s.
nsRefPtr<dom::MediaStreamTrack> ownedTrack = FindOwnedDOMTrack(mOwnedStream, aTrackID); if (GetDOMTrackFor(aTrackID)) {
if (ownedTrack) { mListener->StopTrack(aTrackID,
mListener->StopTrack(aTrackID, !!ownedTrack->AsAudioStreamTrack()); !!GetDOMTrackFor(aTrackID)->AsAudioStreamTrack());
} else { } else {
LOG(("StopTrack(%d) on non-existent track", aTrackID)); LOG(("StopTrack(%d) on non-existent track", aTrackID));
} }
@ -723,7 +726,7 @@ public:
promise->MaybeReject(error); promise->MaybeReject(error);
return promise.forget(); return promise.forget();
} }
if (!GetSourceStream()) { if (!mSourceStream) {
nsRefPtr<MediaStreamError> error = new MediaStreamError(window, nsRefPtr<MediaStreamError> error = new MediaStreamError(window,
NS_LITERAL_STRING("InternalError"), NS_LITERAL_STRING("InternalError"),
NS_LITERAL_STRING("No stream.")); NS_LITERAL_STRING("No stream."));
@ -731,7 +734,7 @@ public:
return promise.forget(); return promise.forget();
} }
nsRefPtr<dom::MediaStreamTrack> track = FindOwnedDOMTrack(mOwnedStream, aTrackID); nsRefPtr<dom::MediaStreamTrack> track = GetDOMTrackFor(aTrackID);
if (!track) { if (!track) {
LOG(("ApplyConstraintsToTrack(%d) on non-existent track", aTrackID)); LOG(("ApplyConstraintsToTrack(%d) on non-existent track", aTrackID));
nsRefPtr<MediaStreamError> error = new MediaStreamError(window, nsRefPtr<MediaStreamError> error = new MediaStreamError(window,
@ -771,8 +774,8 @@ public:
// Allow getUserMedia to pass input data directly to PeerConnection/MediaPipeline // Allow getUserMedia to pass input data directly to PeerConnection/MediaPipeline
virtual bool AddDirectListener(MediaStreamDirectListener *aListener) override virtual bool AddDirectListener(MediaStreamDirectListener *aListener) override
{ {
if (GetSourceStream()) { if (mSourceStream) {
GetSourceStream()->AddDirectListener(aListener); mSourceStream->AddDirectListener(aListener);
return true; // application should ignore NotifyQueuedTrackData return true; // application should ignore NotifyQueuedTrackData
} }
return false; return false;
@ -795,11 +798,22 @@ public:
virtual void RemoveDirectListener(MediaStreamDirectListener *aListener) override virtual void RemoveDirectListener(MediaStreamDirectListener *aListener) override
{ {
if (GetSourceStream()) { if (mSourceStream) {
GetSourceStream()->RemoveDirectListener(aListener); mSourceStream->RemoveDirectListener(aListener);
} }
} }
// let us intervene for direct listeners when someone does track.enabled = false
virtual void SetTrackEnabled(TrackID aTrackID, bool aEnabled) override
{
// We encapsulate the SourceMediaStream and TrackUnion into one entity, so
// we can handle the disabling at the SourceMediaStream
// We need to find the input track ID for output ID aTrackID, so we let the TrackUnion
// forward the request to the source and translate the ID
GetStream()->AsProcessedStream()->ForwardTrackEnabled(aTrackID, aEnabled);
}
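The comment above packs in some indirection, so here is a short sketch, not part of the patch itself, that just spells out the chain of overrides visible in this diff when script flips track.enabled; the function name is made up for illustration.
// Sketch of the enabled-flag round trip (the steps named in the comments are
// the real methods from this diff; SketchDisableTrack itself is hypothetical).
static void SketchDisableTrack(dom::MediaStreamTrack* aTrack)
{
  // 1. MediaStreamTrack::SetEnabled(false) calls
  //    SetTrackEnabled(mTrackID, false) on its owning DOMMediaStream.
  aTrack->SetEnabled(false);
  // 2. nsDOMUserMediaStream::SetTrackEnabled (above) forwards it:
  //    GetStream()->AsProcessedStream()->ForwardTrackEnabled(id, false).
  // 3. TrackUnionStream::ForwardTrackEnabled finds the TrackMapEntry whose
  //    mOutputTrackID matches and calls SetTrackEnabled() on the input
  //    port's source stream with the translated mInputTrackID.
}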
virtual DOMLocalMediaStream* AsDOMLocalMediaStream() override virtual DOMLocalMediaStream* AsDOMLocalMediaStream() override
{ {
return this; return this;
@ -819,14 +833,10 @@ public:
return nullptr; return nullptr;
} }
SourceMediaStream* GetSourceStream() // The actual MediaStream is a TrackUnionStream. But these resources need to be
{ // explicitly destroyed too.
if (GetInputStream()) { nsRefPtr<SourceMediaStream> mSourceStream;
return GetInputStream()->AsSourceStream(); nsRefPtr<MediaInputPort> mPort;
}
return nullptr;
}
nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener; nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
nsRefPtr<AudioDevice> mAudioDevice; // so we can turn on AEC nsRefPtr<AudioDevice> mAudioDevice; // so we can turn on AEC
nsRefPtr<VideoDevice> mVideoDevice; nsRefPtr<VideoDevice> mVideoDevice;
@ -917,7 +927,7 @@ public:
// Start currentTime from the point where this stream was successfully // Start currentTime from the point where this stream was successfully
// returned. // returned.
aStream->SetLogicalStreamStartTime(aStream->GetPlaybackStream()->GetCurrentTime()); aStream->SetLogicalStreamStartTime(aStream->GetStream()->GetCurrentTime());
// This is safe since we're on main-thread, and the windowlist can only // This is safe since we're on main-thread, and the windowlist can only
// be invalidated from the main-thread (see OnNavigation) // be invalidated from the main-thread (see OnNavigation)
@ -989,8 +999,9 @@ public:
MediaStreamGraph::GetInstance(graphDriverType, MediaStreamGraph::GetInstance(graphDriverType,
dom::AudioChannel::Normal); dom::AudioChannel::Normal);
nsRefPtr<SourceMediaStream> stream = msg->CreateSourceStream(nullptr);
nsRefPtr<DOMLocalMediaStream> domStream; nsRefPtr<DOMLocalMediaStream> domStream;
nsRefPtr<SourceMediaStream> stream;
// AudioCapture is a special case, here, in the sense that we're not really // AudioCapture is a special case, here, in the sense that we're not really
// using the audio source and the SourceMediaStream, which acts as // using the audio source and the SourceMediaStream, which acts as
// placeholders. We re-route a number of streams internally in the MSG and mix // placeholders. We re-route a number of streams internally in the MSG and mix
@ -1001,33 +1012,38 @@ public:
// It should be possible to pipe the capture stream to anything. CORS is // It should be possible to pipe the capture stream to anything. CORS is
// not a problem here, we got explicit user content. // not a problem here, we got explicit user content.
domStream->SetPrincipal(window->GetExtantDoc()->NodePrincipal()); domStream->SetPrincipal(window->GetExtantDoc()->NodePrincipal());
stream = msg->CreateSourceStream(nullptr); // Placeholder
msg->RegisterCaptureStreamForWindow( msg->RegisterCaptureStreamForWindow(
mWindowID, domStream->GetInputStream()->AsProcessedStream()); mWindowID, domStream->GetStream()->AsProcessedStream());
window->SetAudioCapture(true); window->SetAudioCapture(true);
} else { } else {
// Normal case, connect the source stream to the track union stream to // Normal case, connect the source stream to the track union stream to
// avoid us blocking // avoid us blocking
domStream = nsDOMUserMediaStream::CreateSourceStream(window, mListener, nsRefPtr<nsDOMUserMediaStream> trackunion =
mAudioDevice, mVideoDevice, nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
msg); mAudioDevice, mVideoDevice,
msg);
if (mAudioDevice) { trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
domStream->CreateOwnDOMTrack(kAudioTrack, MediaSegment::AUDIO); nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
} AllocateInputPort(stream);
if (mVideoDevice) { trackunion->mSourceStream = stream;
domStream->CreateOwnDOMTrack(kVideoTrack, MediaSegment::VIDEO); trackunion->mPort = port.forget();
} // Log the relationship between SourceMediaStream and TrackUnion stream
// Make sure logger starts before capture
AsyncLatencyLogger::Get(true);
LogLatency(AsyncLatencyLogger::MediaStreamCreate,
reinterpret_cast<uint64_t>(stream.get()),
reinterpret_cast<int64_t>(trackunion->GetStream()));
nsCOMPtr<nsIPrincipal> principal; nsCOMPtr<nsIPrincipal> principal;
if (mPeerIdentity) { if (mPeerIdentity) {
principal = nsNullPrincipal::Create(); principal = nsNullPrincipal::Create();
domStream->SetPeerIdentity(mPeerIdentity.forget()); trackunion->SetPeerIdentity(mPeerIdentity.forget());
} else { } else {
principal = window->GetExtantDoc()->NodePrincipal(); principal = window->GetExtantDoc()->NodePrincipal();
} }
domStream->CombineWithPrincipal(principal); trackunion->CombineWithPrincipal(principal);
stream = domStream->GetInputStream()->AsSourceStream();
domStream = trackunion.forget();
} }
if (!domStream || sInShutdown) { if (!domStream || sInShutdown) {
@ -1049,7 +1065,6 @@ public:
// Activate our listener. We'll call Start() on the source when we get a callback // Activate our listener. We'll call Start() on the source when we get a callback
// that the MediaStream has started consuming. The listener is freed // that the MediaStream has started consuming. The listener is freed
// when the page is invalidated (on navigation or close). // when the page is invalidated (on navigation or close).
MOZ_ASSERT(stream);
mListener->Activate(stream.forget(), mAudioDevice, mVideoDevice); mListener->Activate(stream.forget(), mAudioDevice, mVideoDevice);
// Note: includes JS callbacks; must be released on MainThread // Note: includes JS callbacks; must be released on MainThread
@ -1063,7 +1078,7 @@ public:
// Dispatch to the media thread to ask it to start the sources, // Dispatch to the media thread to ask it to start the sources,
// because that can take a while. // because that can take a while.
// Pass ownership of domStream to the MediaOperationTask // Pass ownership of trackunion to the MediaOperationTask
// to ensure it's kept alive until the MediaOperationTask runs (at least). // to ensure it's kept alive until the MediaOperationTask runs (at least).
MediaManager::PostTask(FROM_HERE, MediaManager::PostTask(FROM_HERE,
new MediaOperationTask(MEDIA_START, mListener, domStream, new MediaOperationTask(MEDIA_START, mListener, domStream,

View file

@ -540,7 +540,8 @@ private:
mTrackUnionStream->SetAutofinish(true); mTrackUnionStream->SetAutofinish(true);
// Bind this Track Union Stream with Source Media. // Bind this Track Union Stream with Source Media.
mInputPort = mTrackUnionStream->AllocateInputPort(mRecorder->GetSourceMediaStream()); mInputPort = mTrackUnionStream->AllocateInputPort(mRecorder->GetSourceMediaStream(),
0);
DOMMediaStream* domStream = mRecorder->Stream(); DOMMediaStream* domStream = mRecorder->Stream();
if (domStream) { if (domStream) {
@ -799,7 +800,7 @@ MediaRecorder::MediaRecorder(AudioNode& aSrcAudioNode,
AudioNodeStream* ns = aSrcAudioNode.GetStream(); AudioNodeStream* ns = aSrcAudioNode.GetStream();
if (ns) { if (ns) {
mInputPort = mPipeStream->AllocateInputPort(aSrcAudioNode.GetStream(), mInputPort = mPipeStream->AllocateInputPort(aSrcAudioNode.GetStream(),
TRACK_ANY, 0, aSrcOutput); 0, aSrcOutput);
} }
} }
mAudioNode = &aSrcAudioNode; mAudioNode = &aSrcAudioNode;
@ -1173,7 +1174,7 @@ MediaStream*
MediaRecorder::GetSourceMediaStream() MediaRecorder::GetSourceMediaStream()
{ {
if (mDOMStream != nullptr) { if (mDOMStream != nullptr) {
return mDOMStream->GetPlaybackStream(); return mDOMStream->GetStream();
} }
MOZ_ASSERT(mAudioNode != nullptr); MOZ_ASSERT(mAudioNode != nullptr);
return mPipeStream ? mPipeStream.get() : mAudioNode->GetStream(); return mPipeStream ? mPipeStream.get() : mAudioNode->GetStream();

View file

@ -1601,7 +1601,7 @@ MediaStream::MediaStream(DOMMediaStream* aWrapper)
// aWrapper should not already be connected to a MediaStream! It needs // aWrapper should not already be connected to a MediaStream! It needs
// to be hooked up to this stream, and since this stream is only just // to be hooked up to this stream, and since this stream is only just
// being created now, aWrapper must not be connected to anything. // being created now, aWrapper must not be connected to anything.
NS_ASSERTION(!aWrapper || !aWrapper->GetPlaybackStream(), NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
"Wrapper already has another media stream hooked up to it!"); "Wrapper already has another media stream hooked up to it!");
} }
@ -2430,39 +2430,8 @@ MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph)
mGraph = aGraph; mGraph = aGraph;
} }
void
MediaInputPort::BlockTrackIdImpl(TrackID aTrackId)
{
mBlockedTracks.AppendElement(aTrackId);
}
void
MediaInputPort::BlockTrackId(TrackID aTrackId)
{
class Message : public ControlMessage {
public:
explicit Message(MediaInputPort* aPort, TrackID aTrackId)
: ControlMessage(aPort->GetDestination()),
mPort(aPort), mTrackId(aTrackId) {}
virtual void Run()
{
mPort->BlockTrackIdImpl(mTrackId);
}
virtual void RunDuringShutdown()
{
Run();
}
nsRefPtr<MediaInputPort> mPort;
TrackID mTrackId;
};
MOZ_ASSERT(aTrackId != TRACK_NONE && aTrackId != TRACK_INVALID && aTrackId != TRACK_ANY,
"Only explicit TrackID is allowed");
GraphImpl()->AppendMessage(new Message(this, aTrackId));
}
already_AddRefed<MediaInputPort> already_AddRefed<MediaInputPort>
ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, TrackID aTrackID, ProcessedMediaStream::AllocateInputPort(MediaStream* aStream,
uint16_t aInputNumber, uint16_t aOutputNumber) uint16_t aInputNumber, uint16_t aOutputNumber)
{ {
// This method creates two references to the MediaInputPort: one for // This method creates two references to the MediaInputPort: one for
@ -2485,10 +2454,7 @@ ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, TrackID aTrackID,
} }
nsRefPtr<MediaInputPort> mPort; nsRefPtr<MediaInputPort> mPort;
}; };
nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this,
MOZ_ASSERT(aTrackID != TRACK_NONE && aTrackID != TRACK_INVALID,
"Only TRACK_ANY and explicit ID are allowed");
nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, aTrackID, this,
aInputNumber, aOutputNumber); aInputNumber, aOutputNumber);
port->SetGraphImpl(GraphImpl()); port->SetGraphImpl(GraphImpl());
GraphImpl()->AppendMessage(new Message(port)); GraphImpl()->AppendMessage(new Message(port));
@ -2799,10 +2765,9 @@ MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
} }
ProcessedMediaStream* ProcessedMediaStream*
MediaStreamGraph::CreateAudioCaptureStream(DOMMediaStream* aWrapper, MediaStreamGraph::CreateAudioCaptureStream(DOMMediaStream* aWrapper)
TrackID aTrackId)
{ {
AudioCaptureStream* stream = new AudioCaptureStream(aWrapper, aTrackId); AudioCaptureStream* stream = new AudioCaptureStream(aWrapper);
AddStream(stream); AddStream(stream);
return stream; return stream;
} }

View file

@ -161,16 +161,11 @@ public:
* aTrackEvents can be any combination of TRACK_EVENT_CREATED and * aTrackEvents can be any combination of TRACK_EVENT_CREATED and
* TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track * TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
* at aTrackOffset (relative to the start of the stream). * at aTrackOffset (relative to the start of the stream).
* aInputStream and aInputTrackID will be set if the changes originated
* from an input stream's track. In practice they will only be used for
* ProcessedMediaStreams.
*/ */
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia) {}
MediaStream* aInputStream = nullptr,
TrackID aInputTrackID = TRACK_INVALID) {}
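As a rough illustration of how this notification is consumed, here is a listener sketch; it is not part of the patch, mirrors the pre-backout signature declared above (the restored version simply drops the last two arguments), and the two Handle* helpers are hypothetical.
class SketchListener : public MediaStreamListener
{
public:
  virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                        StreamTime aTrackOffset,
                                        uint32_t aTrackEvents,
                                        const MediaSegment& aQueuedMedia,
                                        MediaStream* aInputStream,
                                        TrackID aInputTrackID) override
  {
    // Called on the MediaStreamGraph thread; keep the work minimal.
    if (aTrackEvents & TRACK_EVENT_CREATED) {
      HandleTrackCreated(aID);
    }
    if (aTrackEvents & TRACK_EVENT_ENDED) {
      HandleTrackEnded(aID);
    }
  }
private:
  void HandleTrackCreated(TrackID aID) {}
  void HandleTrackEnded(TrackID aID) {}
};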
/** /**
* Notify that all new tracks this iteration have been created. * Notify that all new tracks this iteration have been created.
@ -424,6 +419,7 @@ public:
virtual SourceMediaStream* AsSourceStream() { return nullptr; } virtual SourceMediaStream* AsSourceStream() { return nullptr; }
virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; } virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; }
virtual AudioNodeStream* AsAudioNodeStream() { return nullptr; } virtual AudioNodeStream* AsAudioNodeStream() { return nullptr; }
virtual CameraPreviewMediaStream* AsCameraPreviewStream() { return nullptr; }
// These Impl methods perform the core functionality of the control methods // These Impl methods perform the core functionality of the control methods
// above, on the media graph thread. // above, on the media graph thread.
@ -909,10 +905,6 @@ protected:
* We make these refcounted so that stream-related messages with MediaInputPort* * We make these refcounted so that stream-related messages with MediaInputPort*
* pointers can be sent to the main thread safely. * pointers can be sent to the main thread safely.
* *
* A port can be locked to a specific track in the source stream, in which case
* only this track will be forwarded to the destination stream. TRACK_ANY
* can be used to signal that all tracks shall be forwarded.
*
* When a port's source or destination stream dies, the stream's DestroyImpl * When a port's source or destination stream dies, the stream's DestroyImpl
* calls MediaInputPort::Disconnect to disconnect the port from * calls MediaInputPort::Disconnect to disconnect the port from
* the source and destination streams. * the source and destination streams.
@ -928,11 +920,9 @@ class MediaInputPort final
{ {
private: private:
// Do not call this constructor directly. Instead call aDest->AllocateInputPort. // Do not call this constructor directly. Instead call aDest->AllocateInputPort.
MediaInputPort(MediaStream* aSource, TrackID& aSourceTrack, MediaInputPort(MediaStream* aSource, ProcessedMediaStream* aDest,
ProcessedMediaStream* aDest,
uint16_t aInputNumber, uint16_t aOutputNumber) uint16_t aInputNumber, uint16_t aOutputNumber)
: mSource(aSource) : mSource(aSource)
, mSourceTrack(aSourceTrack)
, mDest(aDest) , mDest(aDest)
, mInputNumber(aInputNumber) , mInputNumber(aInputNumber)
, mOutputNumber(aOutputNumber) , mOutputNumber(aOutputNumber)
@ -965,22 +955,8 @@ public:
// Any thread // Any thread
MediaStream* GetSource() { return mSource; } MediaStream* GetSource() { return mSource; }
TrackID GetSourceTrackId() { return mSourceTrack; }
ProcessedMediaStream* GetDestination() { return mDest; } ProcessedMediaStream* GetDestination() { return mDest; }
// Block aTrackId in the port. Consumers will interpret this track as ended.
void BlockTrackId(TrackID aTrackId);
private:
void BlockTrackIdImpl(TrackID aTrackId);
public:
// Returns true if aTrackId has not been blocked and this port has not
// been locked to another track.
bool PassTrackThrough(TrackID aTrackId) {
return !mBlockedTracks.Contains(aTrackId) &&
(mSourceTrack == TRACK_ANY || mSourceTrack == aTrackId);
}
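To make the blocking semantics concrete, here is a small sketch, not part of this changeset, using only the pre-backout API above plus the TRACK_ANY default documented on AllocateInputPort further down; the function name is made up.
// Sketch: forward all of aOwned's tracks into aPlayback, then hide one
// removed track from consumers of the playback stream.
static already_AddRefed<MediaInputPort>
SketchConnectPlayback(ProcessedMediaStream* aPlayback, MediaStream* aOwned,
                      TrackID aRemovedTrackID)
{
  // TRACK_ANY (the default aTrackID) forwards every track from aOwned.
  nsRefPtr<MediaInputPort> port = aPlayback->AllocateInputPort(aOwned);
  // After this, PassTrackThrough(aRemovedTrackID) is false for this port and
  // consumers of aPlayback treat that track as ended.
  port->BlockTrackId(aRemovedTrackID);
  return port.forget();
}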
uint16_t InputNumber() const { return mInputNumber; } uint16_t InputNumber() const { return mInputNumber; }
uint16_t OutputNumber() const { return mOutputNumber; } uint16_t OutputNumber() const { return mOutputNumber; }
@ -1026,13 +1002,11 @@ private:
friend class ProcessedMediaStream; friend class ProcessedMediaStream;
// Never modified after Init() // Never modified after Init()
MediaStream* mSource; MediaStream* mSource;
TrackID mSourceTrack;
ProcessedMediaStream* mDest; ProcessedMediaStream* mDest;
// The input and output numbers are optional, and are currently only used by // The input and output numbers are optional, and are currently only used by
// Web Audio. // Web Audio.
const uint16_t mInputNumber; const uint16_t mInputNumber;
const uint16_t mOutputNumber; const uint16_t mOutputNumber;
nsTArray<TrackID> mBlockedTracks;
// Our media stream graph // Our media stream graph
MediaStreamGraphImpl* mGraph; MediaStreamGraphImpl* mGraph;
@ -1054,13 +1028,8 @@ public:
/** /**
* Allocates a new input port attached to source aStream. * Allocates a new input port attached to source aStream.
* This stream can be removed by calling MediaInputPort::Remove(). * This stream can be removed by calling MediaInputPort::Remove().
* The input port is tied to aTrackID in the source stream.
* aTrackID can be set to TRACK_ANY to automatically forward all tracks from
* aStream. To end a track in the destination stream forwarded with TRACK_ANY,
* it can be blocked in the input port through MediaInputPort::BlockTrackId().
*/ */
already_AddRefed<MediaInputPort> AllocateInputPort(MediaStream* aStream, already_AddRefed<MediaInputPort> AllocateInputPort(MediaStream* aStream,
TrackID aTrackID = TRACK_ANY,
uint16_t aInputNumber = 0, uint16_t aInputNumber = 0,
uint16_t aOutputNumber = 0); uint16_t aOutputNumber = 0);
/** /**
@ -1115,6 +1084,11 @@ public:
virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) = 0; virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) = 0;
void SetAutofinishImpl(bool aAutofinish) { mAutofinish = aAutofinish; } void SetAutofinishImpl(bool aAutofinish) { mAutofinish = aAutofinish; }
/**
* Forward SetTrackEnabled() to the input MediaStream(s) and translate the ID
*/
virtual void ForwardTrackEnabled(TrackID aOutputID, bool aEnabled) {};
// Only valid after MediaStreamGraphImpl::UpdateStreamOrder() has run. // Only valid after MediaStreamGraphImpl::UpdateStreamOrder() has run.
// A DelayNode is considered to break a cycle and so this will not return // A DelayNode is considered to break a cycle and so this will not return
// true for echo loops, only for muted cycles. // true for echo loops, only for muted cycles.
@ -1202,8 +1176,7 @@ public:
/** /**
* Create a stream that will mix all its audio input. * Create a stream that will mix all its audio input.
*/ */
ProcessedMediaStream* CreateAudioCaptureStream(DOMMediaStream* aWrapper, ProcessedMediaStream* CreateAudioCaptureStream(DOMMediaStream* aWrapper);
TrackID aTrackId);
enum { enum {
ADD_STREAM_SUSPENDED = 0x01 ADD_STREAM_SUSPENDED = 0x01

View file

@ -13,7 +13,7 @@ namespace mozilla {
namespace dom { namespace dom {
MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID) MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID)
: mOwningStream(aStream), mTrackID(aTrackID), mEnded(false), mEnabled(true) : mStream(aStream), mTrackID(aTrackID), mEnded(false), mEnabled(true)
{ {
nsresult rv; nsresult rv;
@ -36,7 +36,7 @@ MediaStreamTrack::~MediaStreamTrack()
} }
NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaStreamTrack, DOMEventTargetHelper, NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaStreamTrack, DOMEventTargetHelper,
mOwningStream) mStream)
NS_IMPL_ADDREF_INHERITED(MediaStreamTrack, DOMEventTargetHelper) NS_IMPL_ADDREF_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
NS_IMPL_RELEASE_INHERITED(MediaStreamTrack, DOMEventTargetHelper) NS_IMPL_RELEASE_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
@ -53,20 +53,20 @@ void
MediaStreamTrack::SetEnabled(bool aEnabled) MediaStreamTrack::SetEnabled(bool aEnabled)
{ {
mEnabled = aEnabled; mEnabled = aEnabled;
mOwningStream->SetTrackEnabled(mTrackID, aEnabled); mStream->SetTrackEnabled(mTrackID, aEnabled);
} }
void void
MediaStreamTrack::Stop() MediaStreamTrack::Stop()
{ {
mOwningStream->StopTrack(mTrackID); mStream->StopTrack(mTrackID);
} }
already_AddRefed<Promise> already_AddRefed<Promise>
MediaStreamTrack::ApplyConstraints(const MediaTrackConstraints& aConstraints, MediaStreamTrack::ApplyConstraints(const MediaTrackConstraints& aConstraints,
ErrorResult &aRv) ErrorResult &aRv)
{ {
return GetStream()->ApplyConstraintsToTrack(mTrackID, aConstraints, aRv); return mStream->ApplyConstraintsToTrack(mTrackID, aConstraints, aRv);
} }
} // namespace dom } // namespace dom

View file

@ -35,18 +35,10 @@ public:
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamTrack, NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamTrack,
DOMEventTargetHelper) DOMEventTargetHelper)
DOMMediaStream* GetParentObject() const { return mOwningStream; } DOMMediaStream* GetParentObject() const { return mStream; }
virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override = 0; virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override = 0;
/** DOMMediaStream* GetStream() const { return mStream; }
* Returns the DOMMediaStream owning this track.
*/
DOMMediaStream* GetStream() const { return mOwningStream; }
/**
* Returns the TrackID this stream has in its owning DOMMediaStream's Owned
* stream.
*/
TrackID GetTrackID() const { return mTrackID; } TrackID GetTrackID() const { return mTrackID; }
virtual AudioStreamTrack* AsAudioStreamTrack() { return nullptr; } virtual AudioStreamTrack* AsAudioStreamTrack() { return nullptr; }
virtual VideoStreamTrack* AsVideoStreamTrack() { return nullptr; } virtual VideoStreamTrack* AsVideoStreamTrack() { return nullptr; }
@ -71,7 +63,7 @@ public:
protected: protected:
virtual ~MediaStreamTrack(); virtual ~MediaStreamTrack();
nsRefPtr<DOMMediaStream> mOwningStream; nsRefPtr<DOMMediaStream> mStream;
TrackID mTrackID; TrackID mTrackID;
nsString mID; nsString mID;
bool mEnded; bool mEnded;

View file

@ -19,7 +19,6 @@ namespace mozilla {
typedef int32_t TrackID; typedef int32_t TrackID;
const TrackID TRACK_NONE = 0; const TrackID TRACK_NONE = 0;
const TrackID TRACK_INVALID = -1; const TrackID TRACK_INVALID = -1;
const TrackID TRACK_ANY = -2;
inline TrackTicks RateConvertTicksRoundDown(TrackRate aOutRate, inline TrackTicks RateConvertTicksRoundDown(TrackRate aOutRate,
TrackRate aInRate, TrackRate aInRate,

View file

@ -55,10 +55,8 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
void TrackUnionStream::RemoveInput(MediaInputPort* aPort) void TrackUnionStream::RemoveInput(MediaInputPort* aPort)
{ {
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing input %p", this, aPort));
for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) { for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
if (mTrackMap[i].mInputPort == aPort) { if (mTrackMap[i].mInputPort == aPort) {
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing trackmap entry %d", this, i));
EndTrack(i); EndTrack(i);
mTrackMap.RemoveElementAt(i); mTrackMap.RemoveElementAt(i);
} }
@ -98,19 +96,18 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
if (map->mInputPort == mInputs[i] && map->mInputTrackID == tracks->GetID()) { if (map->mInputPort == mInputs[i] && map->mInputTrackID == tracks->GetID()) {
bool trackFinished; bool trackFinished;
StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID); StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
found = true; if (!outputTrack || outputTrack->IsEnded()) {
if (!outputTrack || outputTrack->IsEnded() ||
!mInputs[i]->PassTrackThrough(tracks->GetID())) {
trackFinished = true; trackFinished = true;
} else { } else {
CopyTrackData(tracks.get(), j, aFrom, aTo, &trackFinished); CopyTrackData(tracks.get(), j, aFrom, aTo, &trackFinished);
} }
mappedTracksFinished[j] = trackFinished; mappedTracksFinished[j] = trackFinished;
mappedTracksWithMatchingInputTracks[j] = true; mappedTracksWithMatchingInputTracks[j] = true;
found = true;
break; break;
} }
} }
if (!found && mInputs[i]->PassTrackThrough(tracks->GetID())) { if (!found) {
bool trackFinished = false; bool trackFinished = false;
trackAdded = true; trackAdded = true;
uint32_t mapIndex = AddTrack(mInputs[i], tracks.get(), aFrom); uint32_t mapIndex = AddTrack(mInputs[i], tracks.get(), aFrom);
@ -149,6 +146,18 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
} }
} }
// Forward SetTrackEnabled(output_track_id, enabled) to the Source MediaStream,
// translating the output track ID into the correct ID in the source.
void TrackUnionStream::ForwardTrackEnabled(TrackID aOutputID, bool aEnabled)
{
for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
if (mTrackMap[i].mOutputTrackID == aOutputID) {
mTrackMap[i].mInputPort->GetSource()->
SetTrackEnabled(mTrackMap[i].mInputTrackID, aEnabled);
}
}
}
uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack, uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
GraphTime aFrom) GraphTime aFrom)
{ {
@ -182,8 +191,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
MediaStreamListener* l = mListeners[j]; MediaStreamListener* l = mListeners[j];
l->NotifyQueuedTrackChanges(Graph(), id, outputStart, l->NotifyQueuedTrackChanges(Graph(), id, outputStart,
MediaStreamListener::TRACK_EVENT_CREATED, MediaStreamListener::TRACK_EVENT_CREATED,
*segment, *segment);
aPort->GetSource(), aTrack->GetID());
} }
segment->AppendNullData(outputStart); segment->AppendNullData(outputStart);
StreamBuffer::Track* track = StreamBuffer::Track* track =
@ -208,7 +216,6 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
StreamBuffer::Track* outputTrack = mBuffer.FindTrack(mTrackMap[aIndex].mOutputTrackID); StreamBuffer::Track* outputTrack = mBuffer.FindTrack(mTrackMap[aIndex].mOutputTrackID);
if (!outputTrack || outputTrack->IsEnded()) if (!outputTrack || outputTrack->IsEnded())
return; return;
STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p ending track %d", this, outputTrack->GetID()));
for (uint32_t j = 0; j < mListeners.Length(); ++j) { for (uint32_t j = 0; j < mListeners.Length(); ++j) {
MediaStreamListener* l = mListeners[j]; MediaStreamListener* l = mListeners[j];
StreamTime offset = outputTrack->GetSegment()->GetDuration(); StreamTime offset = outputTrack->GetSegment()->GetDuration();
@ -216,9 +223,7 @@ TrackUnionStream::TrackUnionStream(DOMMediaStream* aWrapper) :
segment = outputTrack->GetSegment()->CreateEmptyClone(); segment = outputTrack->GetSegment()->CreateEmptyClone();
l->NotifyQueuedTrackChanges(Graph(), outputTrack->GetID(), offset, l->NotifyQueuedTrackChanges(Graph(), outputTrack->GetID(), offset,
MediaStreamListener::TRACK_EVENT_ENDED, MediaStreamListener::TRACK_EVENT_ENDED,
*segment, *segment);
mTrackMap[aIndex].mInputPort->GetSource(),
mTrackMap[aIndex].mInputTrackID);
} }
outputTrack->SetEnded(); outputTrack->SetEnded();
} }

View file

@ -21,6 +21,10 @@ public:
virtual void RemoveInput(MediaInputPort* aPort) override; virtual void RemoveInput(MediaInputPort* aPort) override;
virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override; virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
// Forward SetTrackEnabled(output_track_id, enabled) to the Source MediaStream,
// translating the output track ID into the correct ID in the source.
virtual void ForwardTrackEnabled(TrackID aOutputID, bool aEnabled) override;
protected: protected:
// Only non-ended tracks are allowed to persist in this map. // Only non-ended tracks are allowed to persist in this map.
struct TrackMapEntry { struct TrackMapEntry {

View file

@ -41,9 +41,7 @@ MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
TrackID aID, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia)
MediaStream* aInputStream,
TrackID aInputTrackID)
{ {
// Process the incoming raw track data from MediaStreamGraph, called on the // Process the incoming raw track data from MediaStreamGraph, called on the
// thread of MediaStreamGraph. // thread of MediaStreamGraph.

Просмотреть файл

@ -85,9 +85,7 @@ public :
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia) override;
MediaStream* aInputStream,
TrackID aInputTrackID) override;
/** /**
* Notified the stream is being removed. * Notified the stream is being removed.

View file

@ -57,7 +57,7 @@ CaptureTask::AttachStream()
nsRefPtr<DOMMediaStream> domStream = track->GetStream(); nsRefPtr<DOMMediaStream> domStream = track->GetStream();
domStream->AddPrincipalChangeObserver(this); domStream->AddPrincipalChangeObserver(this);
nsRefPtr<MediaStream> stream = domStream->GetPlaybackStream(); nsRefPtr<MediaStream> stream = domStream->GetStream();
stream->AddListener(this); stream->AddListener(this);
} }
@ -71,7 +71,7 @@ CaptureTask::DetachStream()
nsRefPtr<DOMMediaStream> domStream = track->GetStream(); nsRefPtr<DOMMediaStream> domStream = track->GetStream();
domStream->RemovePrincipalChangeObserver(this); domStream->RemovePrincipalChangeObserver(this);
nsRefPtr<MediaStream> stream = domStream->GetPlaybackStream(); nsRefPtr<MediaStream> stream = domStream->GetStream();
stream->RemoveListener(this); stream->RemoveListener(this);
} }
@ -86,9 +86,7 @@ void
CaptureTask::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID, CaptureTask::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia)
MediaStream* aInputStream,
TrackID aInputTrackID)
{ {
if (mImageGrabbedOrTrackEnd) { if (mImageGrabbedOrTrackEnd) {
return; return;

View file

@ -35,9 +35,7 @@ public:
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia) override;
MediaStream* aInputStream,
TrackID aInputTrackID) override;
virtual void NotifyEvent(MediaStreamGraph* aGraph, virtual void NotifyEvent(MediaStreamGraph* aGraph,
MediaStreamGraphEvent aEvent) override; MediaStreamGraphEvent aEvent) override;

View file

@ -225,8 +225,7 @@ AudioNode::Connect(AudioNode& aDestination, uint32_t aOutput,
MOZ_ASSERT(aInput <= UINT16_MAX, "Unexpected large input port number"); MOZ_ASSERT(aInput <= UINT16_MAX, "Unexpected large input port number");
MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number"); MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number");
input->mStreamPort = destinationStream-> input->mStreamPort = destinationStream->
AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK, AllocateInputPort(mStream, static_cast<uint16_t>(aInput),
static_cast<uint16_t>(aInput),
static_cast<uint16_t>(aOutput)); static_cast<uint16_t>(aOutput));
} }
aDestination.NotifyInputsChanged(); aDestination.NotifyInputsChanged();
@ -268,8 +267,7 @@ AudioNode::Connect(AudioParam& aDestination, uint32_t aOutput,
// Setup our stream as an input to the AudioParam's stream // Setup our stream as an input to the AudioParam's stream
MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number"); MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number");
input->mStreamPort = input->mStreamPort =
ps->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK, ps->AllocateInputPort(mStream, 0, static_cast<uint16_t>(aOutput));
0, static_cast<uint16_t>(aOutput));
} }
} }

View file

@ -142,10 +142,6 @@ AudioNodeExternalInputStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
for (StreamBuffer::TrackIter tracks(source->mBuffer, MediaSegment::AUDIO); for (StreamBuffer::TrackIter tracks(source->mBuffer, MediaSegment::AUDIO);
!tracks.IsEnded(); tracks.Next()) { !tracks.IsEnded(); tracks.Next()) {
const StreamBuffer::Track& inputTrack = *tracks; const StreamBuffer::Track& inputTrack = *tracks;
if (!mInputs[0]->PassTrackThrough(tracks->GetID())) {
continue;
}
const AudioSegment& inputSegment = const AudioSegment& inputSegment =
*static_cast<AudioSegment*>(inputTrack.GetSegment()); *static_cast<AudioSegment*>(inputTrack.GetSegment());
if (inputSegment.IsNull()) { if (inputSegment.IsNull()) {

View file

@ -114,8 +114,7 @@ AudioParam::Stream()
// Setup the AudioParam's stream as an input to the owner AudioNode's stream // Setup the AudioParam's stream as an input to the owner AudioNode's stream
AudioNodeStream* nodeStream = mNode->GetStream(); AudioNodeStream* nodeStream = mNode->GetStream();
if (nodeStream) { if (nodeStream) {
mNodeStreamPort = mNodeStreamPort = nodeStream->AllocateInputPort(mStream);
nodeStream->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK);
} }
// Send the stream to the timeline on the MSG side. // Send the stream to the timeline on the MSG side.

View file

@ -34,14 +34,14 @@ MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(AudioContext* a
aContext->Graph())) aContext->Graph()))
{ {
// Ensure an audio track with the correct ID is exposed to JS // Ensure an audio track with the correct ID is exposed to JS
mDOMStream->CreateOwnDOMTrack(AudioNodeStream::AUDIO_TRACK, MediaSegment::AUDIO); mDOMStream->CreateDOMTrack(AudioNodeStream::AUDIO_TRACK, MediaSegment::AUDIO);
ProcessedMediaStream* outputStream = mDOMStream->GetInputStream()->AsProcessedStream(); ProcessedMediaStream* outputStream = mDOMStream->GetStream()->AsProcessedStream();
MOZ_ASSERT(!!outputStream); MOZ_ASSERT(!!outputStream);
AudioNodeEngine* engine = new AudioNodeEngine(this); AudioNodeEngine* engine = new AudioNodeEngine(this);
mStream = AudioNodeStream::Create(aContext, engine, mStream = AudioNodeStream::Create(aContext, engine,
AudioNodeStream::EXTERNAL_OUTPUT); AudioNodeStream::EXTERNAL_OUTPUT);
mPort = outputStream->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK); mPort = outputStream->AllocateInputPort(mStream);
nsIDocument* doc = aContext->GetParentObject()->GetExtantDoc(); nsIDocument* doc = aContext->GetParentObject()->GetExtantDoc();
if (doc) { if (doc) {

View file

@ -8,7 +8,6 @@
#include "mozilla/dom/MediaStreamAudioSourceNodeBinding.h" #include "mozilla/dom/MediaStreamAudioSourceNodeBinding.h"
#include "AudioNodeEngine.h" #include "AudioNodeEngine.h"
#include "AudioNodeExternalInputStream.h" #include "AudioNodeExternalInputStream.h"
#include "AudioStreamTrack.h"
#include "nsIDocument.h" #include "nsIDocument.h"
#include "mozilla/CORSMode.h" #include "mozilla/CORSMode.h"
@ -42,7 +41,7 @@ MediaStreamAudioSourceNode::MediaStreamAudioSourceNode(AudioContext* aContext,
AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this); AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
mStream = AudioNodeExternalInputStream::Create(aContext->Graph(), engine); mStream = AudioNodeExternalInputStream::Create(aContext->Graph(), engine);
ProcessedMediaStream* outputStream = static_cast<ProcessedMediaStream*>(mStream.get()); ProcessedMediaStream* outputStream = static_cast<ProcessedMediaStream*>(mStream.get());
mInputPort = outputStream->AllocateInputPort(aMediaStream->GetPlaybackStream()); mInputPort = outputStream->AllocateInputPort(aMediaStream->GetStream());
mInputStream->AddConsumerToKeepAlive(static_cast<nsIDOMEventTarget*>(this)); mInputStream->AddConsumerToKeepAlive(static_cast<nsIDOMEventTarget*>(this));
PrincipalChanged(mInputStream); // trigger enabling/disabling of the connector PrincipalChanged(mInputStream); // trigger enabling/disabling of the connector

View file

@ -44,8 +44,7 @@ class MediaStreamAudioSourceNode : public AudioNode,
public DOMMediaStream::PrincipalChangeObserver public DOMMediaStream::PrincipalChangeObserver
{ {
public: public:
MediaStreamAudioSourceNode(AudioContext* aContext, MediaStreamAudioSourceNode(AudioContext* aContext, DOMMediaStream* aMediaStream);
DOMMediaStream* aMediaStream);
NS_DECL_ISUPPORTS_INHERITED NS_DECL_ISUPPORTS_INHERITED
NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamAudioSourceNode, AudioNode) NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamAudioSourceNode, AudioNode)

View file

@ -559,11 +559,11 @@ SpeechRecognition::StartRecording(DOMMediaStream* aDOMStream)
// doesn't get Destroy()'ed // doesn't get Destroy()'ed
mDOMStream = aDOMStream; mDOMStream = aDOMStream;
if (NS_WARN_IF(!mDOMStream->GetPlaybackStream())) { if (NS_WARN_IF(!mDOMStream->GetStream())) {
return NS_ERROR_UNEXPECTED; return NS_ERROR_UNEXPECTED;
} }
mSpeechListener = new SpeechStreamListener(this); mSpeechListener = new SpeechStreamListener(this);
mDOMStream->GetPlaybackStream()->AddListener(mSpeechListener); mDOMStream->GetStream()->AddListener(mSpeechListener);
mEndpointer.StartSession(); mEndpointer.StartSession();
@ -577,7 +577,7 @@ SpeechRecognition::StopRecording()
// we only really need to remove the listener explicitly when testing, // we only really need to remove the listener explicitly when testing,
// as our JS code still holds a reference to mDOMStream and only assigning // as our JS code still holds a reference to mDOMStream and only assigning
// it to nullptr isn't guaranteed to free the stream and the listener. // it to nullptr isn't guaranteed to free the stream and the listener.
mDOMStream->GetPlaybackStream()->RemoveListener(mSpeechListener); mDOMStream->GetStream()->RemoveListener(mSpeechListener);
mSpeechListener = nullptr; mSpeechListener = nullptr;
mDOMStream = nullptr; mDOMStream = nullptr;

View file

@ -33,9 +33,7 @@ SpeechStreamListener::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
TrackID aID, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia)
MediaStream* aInputStream,
TrackID aInputTrackID)
{ {
AudioSegment* audio = const_cast<AudioSegment*>( AudioSegment* audio = const_cast<AudioSegment*>(
static_cast<const AudioSegment*>(&aQueuedMedia)); static_cast<const AudioSegment*>(&aQueuedMedia));

View file

@ -27,9 +27,7 @@ public:
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
StreamTime aTrackOffset, StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const MediaSegment& aQueuedMedia, const MediaSegment& aQueuedMedia) override;
MediaStream* aInputStream,
TrackID aInputTrackID) override;
virtual void NotifyEvent(MediaStreamGraph* aGraph, virtual void NotifyEvent(MediaStreamGraph* aGraph,
MediaStreamListener::MediaStreamGraphEvent event) override; MediaStreamListener::MediaStreamGraphEvent event) override;

View file

@ -21,8 +21,6 @@
extern PRLogModuleInfo* GetSpeechSynthLog(); extern PRLogModuleInfo* GetSpeechSynthLog();
#define LOG(type, msg) MOZ_LOG(GetSpeechSynthLog(), type, msg) #define LOG(type, msg) MOZ_LOG(GetSpeechSynthLog(), type, msg)
#define AUDIO_TRACK 1
namespace mozilla { namespace mozilla {
namespace dom { namespace dom {
@ -195,7 +193,7 @@ nsSpeechTask::Setup(nsISpeechTaskCallback* aCallback,
mChannels = aChannels; mChannels = aChannels;
AudioSegment* segment = new AudioSegment(); AudioSegment* segment = new AudioSegment();
mStream->AddAudioTrack(AUDIO_TRACK, aRate, 0, segment); mStream->AddAudioTrack(1, aRate, 0, segment);
mStream->AddAudioOutput(this); mStream->AddAudioOutput(this);
mStream->SetAudioOutputVolume(this, mVolume); mStream->SetAudioOutputVolume(this, mVolume);

View file

@ -683,7 +683,7 @@ nsresult MediaPipelineTransmit::ReplaceTrack(DOMMediaStream *domstream,
const std::string& track_id) { const std::string& track_id) {
// MainThread, checked in calls we make // MainThread, checked in calls we make
MOZ_MTLOG(ML_DEBUG, "Reattaching pipeline " << description_ << " to stream " MOZ_MTLOG(ML_DEBUG, "Reattaching pipeline " << description_ << " to stream "
<< static_cast<void *>(domstream->GetOwnedStream()) << static_cast<void *>(domstream->GetStream())
<< " track " << track_id << " conduit type=" << << " track " << track_id << " conduit type=" <<
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video")); (conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
@ -691,7 +691,7 @@ nsresult MediaPipelineTransmit::ReplaceTrack(DOMMediaStream *domstream,
DetachMediaStream(); DetachMediaStream();
} }
domstream_ = domstream; // Detach clears it domstream_ = domstream; // Detach clears it
stream_ = domstream->GetOwnedStream(); stream_ = domstream->GetStream();
// Unsets the track id after RemoveListener() takes effect. // Unsets the track id after RemoveListener() takes effect.
listener_->UnsetTrackId(stream_->GraphImpl()); listener_->UnsetTrackId(stream_->GraphImpl());
track_id_ = track_id; track_id_ = track_id;
@ -868,9 +868,7 @@ void MediaPipelineTransmit::PipelineListener::
NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid, NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset, StreamTime offset,
uint32_t events, uint32_t events,
const MediaSegment& queued_media, const MediaSegment& queued_media) {
MediaStream* aInputStream,
TrackID aInputTrackID) {
MOZ_MTLOG(ML_DEBUG, "MediaPipeline::NotifyQueuedTrackChanges()"); MOZ_MTLOG(ML_DEBUG, "MediaPipeline::NotifyQueuedTrackChanges()");
// ignore non-direct data if we're also getting direct data // ignore non-direct data if we're also getting direct data

View file

@ -388,7 +388,7 @@ public:
RefPtr<TransportFlow> rtcp_transport, RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) : nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, MediaPipeline(pc, TRANSMIT, main_thread, sts_thread,
domstream->GetOwnedStream(), track_id, level, domstream->GetStream(), track_id, level,
conduit, rtp_transport, rtcp_transport, filter), conduit, rtp_transport, rtcp_transport, filter),
listener_(new PipelineListener(conduit)), listener_(new PipelineListener(conduit)),
domstream_(domstream), domstream_(domstream),
@ -482,9 +482,7 @@ public:
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset, StreamTime offset,
uint32_t events, uint32_t events,
const MediaSegment& queued_media, const MediaSegment& queued_media) override;
MediaStream* input_stream,
TrackID input_tid) override;
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override {} virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override {}
// Implement MediaStreamDirectListener // Implement MediaStreamDirectListener
@ -631,9 +629,7 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset, StreamTime offset,
uint32_t events, uint32_t events,
const MediaSegment& queued_media, const MediaSegment& queued_media) override {}
MediaStream* input_stream,
TrackID input_tid) override {}
virtual void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override; virtual void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override;
private: private:
@ -729,9 +725,7 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid, virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
StreamTime offset, StreamTime offset,
uint32_t events, uint32_t events,
const MediaSegment& queued_media, const MediaSegment& queued_media) override {}
MediaStream* input_stream,
TrackID input_tid) override {}
virtual void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override; virtual void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override;
// Accessors for external writes from the renderer // Accessors for external writes from the renderer

View file

@ -461,7 +461,7 @@ MediaPipelineFactory::CreateMediaPipelineReceiving(
mPC->GetHandle(), mPC->GetHandle(),
mPC->GetMainThread().get(), mPC->GetMainThread().get(),
mPC->GetSTSThread(), mPC->GetSTSThread(),
stream->GetMediaStream()->GetInputStream(), stream->GetMediaStream()->GetStream(),
aTrack.GetTrackId(), aTrack.GetTrackId(),
numericTrackId, numericTrackId,
aLevel, aLevel,
@ -475,7 +475,7 @@ MediaPipelineFactory::CreateMediaPipelineReceiving(
mPC->GetHandle(), mPC->GetHandle(),
mPC->GetMainThread().get(), mPC->GetMainThread().get(),
mPC->GetSTSThread(), mPC->GetSTSThread(),
stream->GetMediaStream()->GetInputStream(), stream->GetMediaStream()->GetStream(),
aTrack.GetTrackId(), aTrack.GetTrackId(),
numericTrackId, numericTrackId,
aLevel, aLevel,

View file

@ -242,7 +242,7 @@ public:
// Start currentTime from the point where this stream was successfully // Start currentTime from the point where this stream was successfully
// returned. // returned.
aStream->SetLogicalStreamStartTime( aStream->SetLogicalStreamStartTime(
aStream->GetPlaybackStream()->GetCurrentTime()); aStream->GetStream()->GetCurrentTime());
JSErrorResult rv; JSErrorResult rv;
CSFLogInfo(logTag, "Calling OnAddStream(%s)", streamId.c_str()); CSFLogInfo(logTag, "Calling OnAddStream(%s)", streamId.c_str());
@ -471,7 +471,7 @@ PeerConnectionImpl::MakeMediaStream()
} }
#endif #endif
CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream()); CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetStream());
return stream.forget(); return stream.forget();
} }

View file

@ -1304,7 +1304,7 @@ RemoteSourceStreamInfo::StartReceiving()
mReceiving = true; mReceiving = true;
SourceMediaStream* source = GetMediaStream()->GetInputStream()->AsSourceStream(); SourceMediaStream* source = GetMediaStream()->GetStream()->AsSourceStream();
source->FinishAddTracks(); source->FinishAddTracks();
source->SetPullEnabled(true); source->SetPullEnabled(true);
// AdvanceKnownTracksTicksTime(HEAT_DEATH_OF_UNIVERSE) means that in // AdvanceKnownTracksTicksTime(HEAT_DEATH_OF_UNIVERSE) means that in

View file

@ -74,7 +74,6 @@ protected:
virtual ~Fake_VideoSink() {} virtual ~Fake_VideoSink() {}
}; };
class Fake_MediaStream;
class Fake_SourceMediaStream; class Fake_SourceMediaStream;
class Fake_MediaStreamListener class Fake_MediaStreamListener
@ -86,9 +85,7 @@ public:
virtual void NotifyQueuedTrackChanges(mozilla::MediaStreamGraph* aGraph, mozilla::TrackID aID, virtual void NotifyQueuedTrackChanges(mozilla::MediaStreamGraph* aGraph, mozilla::TrackID aID,
mozilla::StreamTime aTrackOffset, mozilla::StreamTime aTrackOffset,
uint32_t aTrackEvents, uint32_t aTrackEvents,
const mozilla::MediaSegment& aQueuedMedia, const mozilla::MediaSegment& aQueuedMedia) = 0;
Fake_MediaStream* aInputStream,
mozilla::TrackID aInputTrackID) = 0;
virtual void NotifyPull(mozilla::MediaStreamGraph* aGraph, mozilla::StreamTime aDesiredTime) = 0; virtual void NotifyPull(mozilla::MediaStreamGraph* aGraph, mozilla::StreamTime aDesiredTime) = 0;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Fake_MediaStreamListener) NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Fake_MediaStreamListener)
@ -366,9 +363,6 @@ public:
virtual bool AddDirectListener(Fake_MediaStreamListener *aListener) { return false; } virtual bool AddDirectListener(Fake_MediaStreamListener *aListener) { return false; }
virtual void RemoveDirectListener(Fake_MediaStreamListener *aListener) {} virtual void RemoveDirectListener(Fake_MediaStreamListener *aListener) {}
Fake_MediaStream *GetInputStream() { return mMediaStream; }
Fake_MediaStream *GetOwnedStream() { return mMediaStream; }
Fake_MediaStream *GetPlaybackStream() { return mMediaStream; }
Fake_MediaStream *GetStream() { return mMediaStream; } Fake_MediaStream *GetStream() { return mMediaStream; }
std::string GetId() const { return mID; } std::string GetId() const { return mID; }
void AssignId(const std::string& id) { mID = id; } void AssignId(const std::string& id) { mID = id; }
@ -414,22 +408,6 @@ public:
void SetTrackEnabled(mozilla::TrackID aTrackID, bool aEnabled) {} void SetTrackEnabled(mozilla::TrackID aTrackID, bool aEnabled) {}
Fake_MediaStreamTrack*
CreateOwnDOMTrack(mozilla::TrackID aTrackID, mozilla::MediaSegment::Type aType)
{
switch(aType) {
case mozilla::MediaSegment::AUDIO: {
return mAudioTrack;
}
case mozilla::MediaSegment::VIDEO: {
return mVideoTrack;
}
default: {
MOZ_CRASH("Unkown media type");
}
}
}
class PrincipalChangeObserver class PrincipalChangeObserver
{ {
public: public:

Просмотреть файл

@ -116,9 +116,7 @@ void Fake_AudioStreamSource::Periodic() {
0, // TrackID 0, // TrackID
0, // Offset TODO(ekr@rtfm.com) fix 0, // Offset TODO(ekr@rtfm.com) fix
0, // ??? 0, // ???
segment, segment);
nullptr, // Input stream
-1); // Input track id
} }
} }