From dd135cd361723842f1d19e06c262b7ab96ea0374 Mon Sep 17 00:00:00 2001
From: Ciure Andrei
Date: Thu, 12 Dec 2019 18:48:01 +0200
Subject: [PATCH] Backed out 2 changesets (bug 1601034) for causing gtest
 mediapipeline_unittest.cpp failures CLOSED TREE

Backed out changeset 6a429cfa6440 (bug 1601034)
Backed out changeset e17753103d27 (bug 1601034)
---
 .../src/mediapipeline/MediaPipeline.cpp       | 138 ++++++------------
 .../src/mediapipeline/MediaPipeline.h         |  16 +-
 .../src/peerconnection/PeerConnectionImpl.cpp |  22 +--
 3 files changed, 56 insertions(+), 120 deletions(-)

diff --git a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
index 8b490f6f1889..1021dc1a2f30 100644
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -711,11 +711,11 @@ class MediaPipelineTransmit::PipelineListener
         ->SendVideoFrame(aVideoFrame);
   }

-  void SetTrackEnabled(MediaStreamTrack* aTrack, bool aEnabled);
-
   // Implement MediaTrackListener
   void NotifyQueuedChanges(MediaTrackGraph* aGraph, TrackTime aOffset,
                            const MediaSegment& aQueuedMedia) override;
+  void NotifyEnabledStateChanged(MediaTrackGraph* aGraph,
+                                 bool aEnabled) override;

   // Implement DirectMediaTrackListener
   void NotifyRealtimeTrackData(MediaTrackGraph* aGraph, TrackTime aOffset,
@@ -740,29 +740,6 @@ class MediaPipelineTransmit::PipelineListener
   bool mDirectConnect;
 };

-// MediaStreamTrackConsumer inherits from SupportsWeakPtr, which is
-// main-thread-only.
-class MediaPipelineTransmit::PipelineListenerTrackConsumer
-    : public MediaStreamTrackConsumer {
-  virtual ~PipelineListenerTrackConsumer() { MOZ_ASSERT(NS_IsMainThread()); }
-
-  const RefPtr<PipelineListener> mListener;
-
- public:
-  NS_INLINE_DECL_REFCOUNTING(PipelineListenerTrackConsumer)
-
-  explicit PipelineListenerTrackConsumer(RefPtr<PipelineListener> aListener)
-      : mListener(std::move(aListener)) {
-    MOZ_ASSERT(NS_IsMainThread());
-  }
-
-  // Implement MediaStreamTrackConsumer
-  void NotifyEnabledChanged(MediaStreamTrack* aTrack, bool aEnabled) override {
-    MOZ_ASSERT(NS_IsMainThread());
-    mListener->SetTrackEnabled(aTrack, aEnabled);
-  }
-};
-
 // Implements VideoConverterListener for MediaPipeline.
 //
 // We pass converted frames on to MediaPipelineTransmit::PipelineListener
@@ -809,10 +786,6 @@ MediaPipelineTransmit::MediaPipelineTransmit(
                     std::move(aConduit)),
       mIsVideo(aIsVideo),
       mListener(new PipelineListener(mConduit)),
-      mTrackConsumer(
-          MakeAndAddRef<nsMainThreadPtrHolder<PipelineListenerTrackConsumer>>(
-              "MediaPipelineTransmit::mTrackConsumer",
-              MakeAndAddRef<PipelineListenerTrackConsumer>(mListener))),
      mFeeder(aIsVideo ? MakeAndAddRef<VideoFrameFeeder>(mListener)
                       : nullptr),  // For video we send frames to an
                                    // async VideoFrameConverter that
@@ -870,22 +843,23 @@ void MediaPipelineTransmit::SetDescription() {
 void MediaPipelineTransmit::Stop() {
   ASSERT_ON_THREAD(mMainThread);

-  if (!mTransmitting) {
-    return;
-  }
-
-  if (!mSendTrack) {
+  if (!mDomTrack || !mTransmitting) {
     return;
   }

   mTransmitting = false;

-  mConduit->StopTransmitting();
-  mSendTrack->Suspend();
-  if (mSendTrack->mType == MediaSegment::VIDEO) {
-    mSendTrack->RemoveDirectListener(mListener);
+  if (mDomTrack->AsAudioStreamTrack()) {
+    mDomTrack->RemoveDirectListener(mListener);
+    mDomTrack->RemoveListener(mListener);
+  } else if (mDomTrack->AsVideoStreamTrack()) {
+    mDomTrack->RemoveDirectListener(mListener);
+    mDomTrack->RemoveListener(mListener);
+  } else {
+    MOZ_ASSERT(false, "Unknown track type");
   }
-  mSendTrack->RemoveListener(mListener);
+
+  mConduit->StopTransmitting();
 }

 bool MediaPipelineTransmit::Transmitting() const {
@@ -897,15 +871,12 @@ void MediaPipelineTransmit::Start() {
   ASSERT_ON_THREAD(mMainThread);

-  if (mTransmitting) {
-    return;
-  }
-
-  if (!mSendTrack) {
+  if (!mDomTrack || mTransmitting) {
     return;
   }

   mTransmitting = true;
+
   mConduit->StartTransmitting();

   // TODO(ekr@rtfm.com): Check for errors
@@ -914,12 +885,30 @@ void MediaPipelineTransmit::Start() {
           ("Attaching pipeline to track %p conduit type=%s", this,
            (mConduit->type() == MediaSessionConduit::AUDIO ? "audio" : "video")));

-  mSendTrack->Resume();
+#if !defined(MOZILLA_EXTERNAL_LINKAGE)
+  // With full duplex we don't risk having audio come in late to the MTG
+  // so we won't need a direct listener.
+  const bool enableDirectListener =
+      !Preferences::GetBool("media.navigator.audio.full_duplex", false);
+#else
+  const bool enableDirectListener = true;
+#endif

-  if (mSendTrack->mType == MediaSegment::VIDEO) {
-    mSendTrack->AddDirectListener(mListener);
+  if (mDomTrack->AsAudioStreamTrack()) {
+    if (enableDirectListener) {
+      // Register the Listener directly with the source if we can.
+      // We also register it as a non-direct listener so we fall back to that
+      // if installing the direct listener fails. As a direct listener we get
+      // access to direct unqueued (and not resampled) data.
+      mDomTrack->AddDirectListener(mListener);
+    }
+    mDomTrack->AddListener(mListener);
+  } else if (mDomTrack->AsVideoStreamTrack()) {
+    mDomTrack->AddDirectListener(mListener);
+    mDomTrack->AddListener(mListener);
+  } else {
+    MOZ_ASSERT(false, "Unknown track type");
   }
-  mSendTrack->AddListener(mListener);
 }

 bool MediaPipelineTransmit::IsVideo() const { return mIsVideo; }
@@ -958,19 +947,7 @@ void MediaPipelineTransmit::UpdateSinkIdentity_m(

 void MediaPipelineTransmit::DetachMedia() {
   ASSERT_ON_THREAD(mMainThread);
-  MOZ_ASSERT(!mTransmitting);
-  if (mDomTrack) {
-    mDomTrack->RemoveConsumer(mTrackConsumer);
-    mDomTrack = nullptr;
-  }
-  if (mSendPort) {
-    mSendPort->Destroy();
-    mSendPort = nullptr;
-  }
-  if (mSendTrack) {
-    mSendTrack->Destroy();
-    mSendTrack = nullptr;
-  }
+  mDomTrack = nullptr;
   // Let the listener be destroyed with the pipeline (or later).
 }

@@ -982,8 +959,7 @@ void MediaPipelineTransmit::TransportReady_s() {
 }

 nsresult MediaPipelineTransmit::SetTrack(RefPtr<MediaStreamTrack> aDomTrack) {
-  MOZ_ASSERT(NS_IsMainThread());
-
+  // MainThread, checked in calls we make
   if (aDomTrack) {
     nsString nsTrackId;
     aDomTrack->GetId(nsTrackId);
@@ -995,35 +971,15 @@ nsresult MediaPipelineTransmit::SetTrack(RefPtr<MediaStreamTrack> aDomTrack) {
             (mConduit->type() == MediaSessionConduit::AUDIO ? "audio" : "video")));
   }

-  if (mDomTrack) {
-    mDomTrack->RemoveConsumer(mTrackConsumer);
-  }
-  if (mSendPort) {
-    mSendPort->Destroy();
-    mSendPort = nullptr;
-  }
-
+  RefPtr<MediaStreamTrack> oldTrack = mDomTrack;
+  bool wasTransmitting = oldTrack && mTransmitting;
+  Stop();
   mDomTrack = std::move(aDomTrack);
   SetDescription();

-  if (mDomTrack) {
-    if (!mDomTrack->Ended()) {
-      if (!mSendTrack) {
-        // Create the send track only once; when the first live track is set.
-        MOZ_ASSERT(!mTransmitting);
-        mSendTrack = mDomTrack->Graph()->CreateForwardedInputTrack(
-            mDomTrack->GetTrack()->mType);
-        mSendTrack->QueueSetAutoend(false);
-        mSendTrack->Suspend();  // Suspended while not transmitting.
-      }
-      mSendPort = mSendTrack->AllocateInputPort(mDomTrack->GetTrack());
-    }
-    mDomTrack->AddConsumer(mTrackConsumer);
-    if (mConverter) {
-      mConverter->SetTrackEnabled(mDomTrack->Enabled());
-    }
+  if (wasTransmitting) {
+    Start();
   }
-
   return NS_OK;
 }

@@ -1142,13 +1098,11 @@ void MediaPipelineTransmit::PipelineListener::NotifyQueuedChanges(
   NewData(aQueuedMedia, rate);
 }

-void MediaPipelineTransmit::PipelineListener::SetTrackEnabled(
-    MediaStreamTrack* aTrack, bool aEnabled) {
-  MOZ_ASSERT(NS_IsMainThread());
+void MediaPipelineTransmit::PipelineListener::NotifyEnabledStateChanged(
+    MediaTrackGraph* aGraph, bool aEnabled) {
   if (mConduit->type() != MediaSessionConduit::VIDEO) {
     return;
   }
-  MOZ_ASSERT(mConverter);
   mConverter->SetTrackEnabled(aEnabled);
 }

diff --git a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
index a8db70c51901..bc709299df50 100644
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
@@ -32,13 +32,10 @@ class nsIPrincipal;

 namespace mozilla {

-class AudioProxyThread;
-class MediaInputPort;
 class MediaPipelineFilter;
 class MediaTransportHandler;
 class PeerIdentity;
-class ProcessedMediaTrack;
-class SourceMediaTrack;
+class AudioProxyThread;
 class VideoFrameConverter;

 namespace dom {
@@ -46,6 +43,8 @@ class MediaStreamTrack;
 struct RTCRTPContributingSourceStats;
 }  // namespace dom

+class SourceMediaTrack;
+
 // A class that represents the pipeline of audio and video
 // The dataflow looks like:
 //
@@ -308,7 +307,6 @@ class MediaPipelineTransmit : public MediaPipeline {

   // Separate classes to allow ref counting
   class PipelineListener;
-  class PipelineListenerTrackConsumer;
   class VideoFrameFeeder;

  protected:
@@ -319,18 +317,10 @@ class MediaPipelineTransmit : public MediaPipeline {
  private:
   const bool mIsVideo;
   const RefPtr<PipelineListener> mListener;
-  // Listens for changes in enabled state on the attached MediaStreamTrack, and
-  // notifies mListener.
-  const nsMainThreadPtrHandle<PipelineListenerTrackConsumer> mTrackConsumer;
   const RefPtr<VideoFrameFeeder> mFeeder;
   RefPtr<AudioProxyThread> mAudioProcessing;
   RefPtr<VideoFrameConverter> mConverter;
   RefPtr<dom::MediaStreamTrack> mDomTrack;
-  // Input port connecting mDomTrack's MediaTrack to mSendTrack.
-  RefPtr<MediaInputPort> mSendPort;
-  // MediaTrack that we send over the network. This allows changing mDomTrack.
-  RefPtr<ProcessedMediaTrack> mSendTrack;
-  // True if we're actively transmitting data to the network. Main thread only.
   bool mTransmitting;
 };

diff --git a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
index 07eee907b358..3d5db9751c49 100644
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -1929,21 +1929,13 @@ PeerConnectionImpl::ReplaceTrackNoRenegotiation(TransceiverImpl& aTransceiver,
     PrincipalChanged(aWithTrack);
   }

-  if (aTransceiver.IsVideo()) {
-    // We update the media pipelines here so we can apply different codec
-    // settings for different sources (e.g. screensharing as opposed to camera.)
-    MediaSourceEnum oldSource = oldSendTrack
-                                    ? oldSendTrack->GetSource().GetMediaSource()
-                                    : MediaSourceEnum::Camera;
-    MediaSourceEnum newSource = aWithTrack
-                                    ? aWithTrack->GetSource().GetMediaSource()
-                                    : MediaSourceEnum::Camera;
-    if (oldSource != newSource) {
-      if (NS_WARN_IF(NS_FAILED(rv = aTransceiver.UpdateConduit()))) {
-        CSFLogError(LOGTAG, "Error Updating VideoConduit");
-        return rv;
-      }
-    }
+  // We update the media pipelines here so we can apply different codec
+  // settings for different sources (e.g. screensharing as opposed to camera.)
+  // TODO: We should probably only do this if the source has in fact changed.
+
+  if (NS_FAILED((rv = mMedia->UpdateMediaPipelines()))) {
+    CSFLogError(LOGTAG, "Error Updating MediaPipelines");
+    return rv;
   }

   return NS_OK;