Backed out 2 changesets (bug 1601034) for causing gtest mediapipeline_unittest.cpp failures CLOSED TREE

Backed out changeset 6a429cfa6440 (bug 1601034)
Backed out changeset e17753103d27 (bug 1601034)
Ciure Andrei 2019-12-12 18:48:01 +02:00
Parent 6c95adafed
Commit dd135cd361
3 changed files with 56 additions and 120 deletions

View file

@@ -711,11 +711,11 @@ class MediaPipelineTransmit::PipelineListener
         ->SendVideoFrame(aVideoFrame);
   }
 
-  void SetTrackEnabled(MediaStreamTrack* aTrack, bool aEnabled);
-
   // Implement MediaTrackListener
   void NotifyQueuedChanges(MediaTrackGraph* aGraph, TrackTime aOffset,
                            const MediaSegment& aQueuedMedia) override;
+  void NotifyEnabledStateChanged(MediaTrackGraph* aGraph,
+                                 bool aEnabled) override;
 
   // Implement DirectMediaTrackListener
   void NotifyRealtimeTrackData(MediaTrackGraph* aGraph, TrackTime aOffset,
@@ -740,29 +740,6 @@ class MediaPipelineTransmit::PipelineListener
   bool mDirectConnect;
 };
 
-// MediaStreamTrackConsumer inherits from SupportsWeakPtr, which is
-// main-thread-only.
-class MediaPipelineTransmit::PipelineListenerTrackConsumer
-    : public MediaStreamTrackConsumer {
-  virtual ~PipelineListenerTrackConsumer() { MOZ_ASSERT(NS_IsMainThread()); }
-
-  const RefPtr<PipelineListener> mListener;
-
- public:
-  NS_INLINE_DECL_REFCOUNTING(PipelineListenerTrackConsumer)
-
-  explicit PipelineListenerTrackConsumer(RefPtr<PipelineListener> aListener)
-      : mListener(std::move(aListener)) {
-    MOZ_ASSERT(NS_IsMainThread());
-  }
-
-  // Implement MediaStreamTrackConsumer
-  void NotifyEnabledChanged(MediaStreamTrack* aTrack, bool aEnabled) override {
-    MOZ_ASSERT(NS_IsMainThread());
-    mListener->SetTrackEnabled(aTrack, aEnabled);
-  }
-};
-
 // Implements VideoConverterListener for MediaPipeline.
 //
 // We pass converted frames on to MediaPipelineTransmit::PipelineListener
@@ -809,10 +786,6 @@ MediaPipelineTransmit::MediaPipelineTransmit(
                     std::move(aConduit)),
       mIsVideo(aIsVideo),
       mListener(new PipelineListener(mConduit)),
-      mTrackConsumer(
-          MakeAndAddRef<nsMainThreadPtrHolder<PipelineListenerTrackConsumer>>(
-              "MediaPipelineTransmit::mTrackConsumer",
-              MakeAndAddRef<PipelineListenerTrackConsumer>(mListener))),
       mFeeder(aIsVideo ? MakeAndAddRef<VideoFrameFeeder>(mListener)
                        : nullptr),  // For video we send frames to an
                                     // async VideoFrameConverter that
@@ -870,22 +843,23 @@ void MediaPipelineTransmit::SetDescription() {
 void MediaPipelineTransmit::Stop() {
   ASSERT_ON_THREAD(mMainThread);
 
-  if (!mTransmitting) {
-    return;
-  }
-
-  if (!mSendTrack) {
+  if (!mDomTrack || !mTransmitting) {
     return;
   }
 
   mTransmitting = false;
-  mConduit->StopTransmitting();
 
-  mSendTrack->Suspend();
-  if (mSendTrack->mType == MediaSegment::VIDEO) {
-    mSendTrack->RemoveDirectListener(mListener);
+  if (mDomTrack->AsAudioStreamTrack()) {
+    mDomTrack->RemoveDirectListener(mListener);
+    mDomTrack->RemoveListener(mListener);
+  } else if (mDomTrack->AsVideoStreamTrack()) {
+    mDomTrack->RemoveDirectListener(mListener);
+    mDomTrack->RemoveListener(mListener);
+  } else {
+    MOZ_ASSERT(false, "Unknown track type");
   }
-  mSendTrack->RemoveListener(mListener);
+
+  mConduit->StopTransmitting();
 }
 
 bool MediaPipelineTransmit::Transmitting() const {
@@ -897,15 +871,12 @@ bool MediaPipelineTransmit::Transmitting() const {
 void MediaPipelineTransmit::Start() {
   ASSERT_ON_THREAD(mMainThread);
 
-  if (mTransmitting) {
-    return;
-  }
-  if (!mSendTrack) {
+  if (!mDomTrack || mTransmitting) {
     return;
   }
 
   mTransmitting = true;
   mConduit->StartTransmitting();
 
   // TODO(ekr@rtfm.com): Check for errors
@@ -914,12 +885,30 @@ void MediaPipelineTransmit::Start() {
           ("Attaching pipeline to track %p conduit type=%s", this,
            (mConduit->type() == MediaSessionConduit::AUDIO ? "audio" : "video")));
 
-  mSendTrack->Resume();
+#if !defined(MOZILLA_EXTERNAL_LINKAGE)
+  // With full duplex we don't risk having audio come in late to the MTG
+  // so we won't need a direct listener.
+  const bool enableDirectListener =
+      !Preferences::GetBool("media.navigator.audio.full_duplex", false);
+#else
+  const bool enableDirectListener = true;
+#endif
 
-  if (mSendTrack->mType == MediaSegment::VIDEO) {
-    mSendTrack->AddDirectListener(mListener);
+  if (mDomTrack->AsAudioStreamTrack()) {
+    if (enableDirectListener) {
+      // Register the Listener directly with the source if we can.
+      // We also register it as a non-direct listener so we fall back to that
+      // if installing the direct listener fails. As a direct listener we get
+      // access to direct unqueued (and not resampled) data.
+      mDomTrack->AddDirectListener(mListener);
+    }
+    mDomTrack->AddListener(mListener);
+  } else if (mDomTrack->AsVideoStreamTrack()) {
+    mDomTrack->AddDirectListener(mListener);
+    mDomTrack->AddListener(mListener);
+  } else {
+    MOZ_ASSERT(false, "Unknown track type");
   }
-  mSendTrack->AddListener(mListener);
 }
 
 bool MediaPipelineTransmit::IsVideo() const { return mIsVideo; }
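
Note on the two hunks above: the restored Start()/Stop() attach and detach the pipeline's listener directly on the DOM track, installing a direct listener for unqueued data where possible (gated for audio on the media.navigator.audio.full_duplex pref), whereas the backed-out code resumed/suspended a dedicated send track instead. The following is a rough, self-contained sketch of that restored wiring; Track, Listener and PipelineTransmit here are simplified stand-ins, not the actual Gecko classes.

#include <cassert>
#include <memory>
#include <set>

// Simplified stand-ins for dom::MediaStreamTrack and the pipeline listener.
enum class TrackKind { Audio, Video };

struct Listener {};

struct Track {
  TrackKind kind = TrackKind::Audio;
  std::set<Listener*> listeners;        // queued (possibly resampled) data
  std::set<Listener*> directListeners;  // unqueued, unresampled data
  void AddListener(Listener* aListener) { listeners.insert(aListener); }
  void RemoveListener(Listener* aListener) { listeners.erase(aListener); }
  void AddDirectListener(Listener* aListener) { directListeners.insert(aListener); }
  void RemoveDirectListener(Listener* aListener) { directListeners.erase(aListener); }
};

struct PipelineTransmit {
  std::shared_ptr<Track> domTrack;
  Listener listener;
  bool transmitting = false;
  bool fullDuplex = false;  // models the media.navigator.audio.full_duplex pref

  void Start() {
    if (!domTrack || transmitting) return;
    transmitting = true;
    if (domTrack->kind == TrackKind::Audio) {
      // Direct listener only when full duplex is off; the non-direct
      // listener is always installed as a fallback.
      if (!fullDuplex) domTrack->AddDirectListener(&listener);
      domTrack->AddListener(&listener);
    } else {
      domTrack->AddDirectListener(&listener);
      domTrack->AddListener(&listener);
    }
  }

  void Stop() {
    if (!domTrack || !transmitting) return;
    transmitting = false;
    domTrack->RemoveDirectListener(&listener);
    domTrack->RemoveListener(&listener);
  }
};

int main() {
  PipelineTransmit pipeline;
  pipeline.domTrack = std::make_shared<Track>();
  pipeline.Start();
  assert(pipeline.transmitting);
  pipeline.Stop();
  assert(!pipeline.transmitting);
}
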
@ -958,19 +947,7 @@ void MediaPipelineTransmit::UpdateSinkIdentity_m(
void MediaPipelineTransmit::DetachMedia() { void MediaPipelineTransmit::DetachMedia() {
ASSERT_ON_THREAD(mMainThread); ASSERT_ON_THREAD(mMainThread);
MOZ_ASSERT(!mTransmitting); mDomTrack = nullptr;
if (mDomTrack) {
mDomTrack->RemoveConsumer(mTrackConsumer);
mDomTrack = nullptr;
}
if (mSendPort) {
mSendPort->Destroy();
mSendPort = nullptr;
}
if (mSendTrack) {
mSendTrack->Destroy();
mSendTrack = nullptr;
}
// Let the listener be destroyed with the pipeline (or later). // Let the listener be destroyed with the pipeline (or later).
} }
@@ -982,8 +959,7 @@ void MediaPipelineTransmit::TransportReady_s() {
 }
 
 nsresult MediaPipelineTransmit::SetTrack(RefPtr<MediaStreamTrack> aDomTrack) {
-  MOZ_ASSERT(NS_IsMainThread());
-
+  // MainThread, checked in calls we make
   if (aDomTrack) {
     nsString nsTrackId;
     aDomTrack->GetId(nsTrackId);
@@ -995,35 +971,15 @@ nsresult MediaPipelineTransmit::SetTrack(RefPtr<MediaStreamTrack> aDomTrack) {
              (mConduit->type() == MediaSessionConduit::AUDIO ? "audio" : "video")));
   }
 
-  if (mDomTrack) {
-    mDomTrack->RemoveConsumer(mTrackConsumer);
-  }
-  if (mSendPort) {
-    mSendPort->Destroy();
-    mSendPort = nullptr;
-  }
+  RefPtr<dom::MediaStreamTrack> oldTrack = mDomTrack;
+  bool wasTransmitting = oldTrack && mTransmitting;
+  Stop();
   mDomTrack = std::move(aDomTrack);
   SetDescription();
-
-  if (mDomTrack) {
-    if (!mDomTrack->Ended()) {
-      if (!mSendTrack) {
-        // Create the send track only once; when the first live track is set.
-        MOZ_ASSERT(!mTransmitting);
-        mSendTrack = mDomTrack->Graph()->CreateForwardedInputTrack(
-            mDomTrack->GetTrack()->mType);
-        mSendTrack->QueueSetAutoend(false);
-        mSendTrack->Suspend();  // Suspended while not transmitting.
-      }
-      mSendPort = mSendTrack->AllocateInputPort(mDomTrack->GetTrack());
-    }
-    mDomTrack->AddConsumer(mTrackConsumer);
-    if (mConverter) {
-      mConverter->SetTrackEnabled(mDomTrack->Enabled());
-    }
+  if (wasTransmitting) {
+    Start();
   }
 
   return NS_OK;
 }
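
With the backout, SetTrack() no longer manages a ProcessedMediaTrack and MediaInputPort; replacing the track reduces to stop, swap, and restart if the pipeline had been transmitting. Below is a minimal stand-alone sketch of that pattern; the Track and PipelineTransmit types are hypothetical simplifications, not the real MediaStreamTrack/MediaPipelineTransmit API.

#include <cassert>
#include <memory>
#include <utility>

struct Track {};

struct PipelineTransmit {
  std::shared_ptr<Track> domTrack;
  bool transmitting = false;

  void Start() {
    if (!domTrack || transmitting) return;
    transmitting = true;  // real code also attaches listeners here
  }
  void Stop() {
    if (!domTrack || !transmitting) return;
    transmitting = false;  // real code also detaches listeners here
  }

  void SetTrack(std::shared_ptr<Track> newTrack) {
    // Mirror of the restored flow: remember whether we were transmitting,
    // stop, swap the DOM track, and restart only if needed.
    const bool wasTransmitting = domTrack && transmitting;
    Stop();
    domTrack = std::move(newTrack);
    if (wasTransmitting) {
      Start();
    }
  }
};

int main() {
  PipelineTransmit p;
  p.SetTrack(std::make_shared<Track>());
  p.Start();
  p.SetTrack(std::make_shared<Track>());  // swap while transmitting -> restarts
  assert(p.transmitting);
  p.SetTrack(nullptr);                    // detach; Stop() already ran
  assert(!p.transmitting);
}
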
@@ -1142,13 +1098,11 @@ void MediaPipelineTransmit::PipelineListener::NotifyQueuedChanges(
   NewData(aQueuedMedia, rate);
 }
 
-void MediaPipelineTransmit::PipelineListener::SetTrackEnabled(
-    MediaStreamTrack* aTrack, bool aEnabled) {
-  MOZ_ASSERT(NS_IsMainThread());
+void MediaPipelineTransmit::PipelineListener::NotifyEnabledStateChanged(
+    MediaTrackGraph* aGraph, bool aEnabled) {
   if (mConduit->type() != MediaSessionConduit::VIDEO) {
     return;
   }
   MOZ_ASSERT(mConverter);
   mConverter->SetTrackEnabled(aEnabled);
 }
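
The restored NotifyEnabledStateChanged() receives enabled-state changes through the MediaTrackListener interface and forwards them to the VideoFrameConverter only for video conduits; the backed-out code routed the same signal through a main-thread MediaStreamTrackConsumer (PipelineListenerTrackConsumer) instead. A simplified, self-contained sketch of the restored path follows; the types are illustrative stand-ins, not the Gecko classes.

#include <cassert>

enum class ConduitType { Audio, Video };

struct VideoFrameConverter {
  bool trackEnabled = true;
  void SetTrackEnabled(bool aEnabled) { trackEnabled = aEnabled; }
};

struct PipelineListener {
  ConduitType conduitType = ConduitType::Audio;
  VideoFrameConverter* converter = nullptr;  // only set for video pipelines

  void NotifyEnabledStateChanged(bool aEnabled) {
    if (conduitType != ConduitType::Video) {
      return;  // audio pipelines ignore the enabled flag here
    }
    assert(converter);
    converter->SetTrackEnabled(aEnabled);
  }
};

int main() {
  VideoFrameConverter converter;
  PipelineListener video{ConduitType::Video, &converter};
  video.NotifyEnabledStateChanged(false);
  assert(!converter.trackEnabled);

  PipelineListener audio{ConduitType::Audio, nullptr};
  audio.NotifyEnabledStateChanged(false);  // no-op
}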

View file

@@ -32,13 +32,10 @@
 class nsIPrincipal;
 
 namespace mozilla {
-class AudioProxyThread;
-class MediaInputPort;
 class MediaPipelineFilter;
 class MediaTransportHandler;
 class PeerIdentity;
-class ProcessedMediaTrack;
-class SourceMediaTrack;
+class AudioProxyThread;
 class VideoFrameConverter;
 
 namespace dom {
@@ -46,6 +43,8 @@ class MediaStreamTrack;
 struct RTCRTPContributingSourceStats;
 }  // namespace dom
 
+class SourceMediaTrack;
+
 // A class that represents the pipeline of audio and video
 // The dataflow looks like:
 //
@@ -308,7 +307,6 @@ class MediaPipelineTransmit : public MediaPipeline {
   // Separate classes to allow ref counting
   class PipelineListener;
-  class PipelineListenerTrackConsumer;
   class VideoFrameFeeder;
 
  protected:
@@ -319,18 +317,10 @@ class MediaPipelineTransmit : public MediaPipeline {
 
  private:
   const bool mIsVideo;
   const RefPtr<PipelineListener> mListener;
-  // Listens for changes in enabled state on the attached MediaStreamTrack, and
-  // notifies mListener.
-  const nsMainThreadPtrHandle<PipelineListenerTrackConsumer> mTrackConsumer;
   const RefPtr<VideoFrameFeeder> mFeeder;
   RefPtr<AudioProxyThread> mAudioProcessing;
   RefPtr<VideoFrameConverter> mConverter;
   RefPtr<dom::MediaStreamTrack> mDomTrack;
-  // Input port connecting mDomTrack's MediaTrack to mSendTrack.
-  RefPtr<MediaInputPort> mSendPort;
-  // MediaTrack that we send over the network. This allows changing mDomTrack.
-  RefPtr<ProcessedMediaTrack> mSendTrack;
-  // True if we're actively transmitting data to the network. Main thread only.
   bool mTransmitting;
 };

View file

@@ -1929,21 +1929,13 @@ PeerConnectionImpl::ReplaceTrackNoRenegotiation(TransceiverImpl& aTransceiver,
     PrincipalChanged(aWithTrack);
   }
 
-  if (aTransceiver.IsVideo()) {
-    // We update the media pipelines here so we can apply different codec
-    // settings for different sources (e.g. screensharing as opposed to camera.)
-    MediaSourceEnum oldSource = oldSendTrack
-                                    ? oldSendTrack->GetSource().GetMediaSource()
-                                    : MediaSourceEnum::Camera;
-    MediaSourceEnum newSource = aWithTrack
-                                    ? aWithTrack->GetSource().GetMediaSource()
-                                    : MediaSourceEnum::Camera;
-    if (oldSource != newSource) {
-      if (NS_WARN_IF(NS_FAILED(rv = aTransceiver.UpdateConduit()))) {
-        CSFLogError(LOGTAG, "Error Updating VideoConduit");
-        return rv;
-      }
-    }
+  // We update the media pipelines here so we can apply different codec
+  // settings for different sources (e.g. screensharing as opposed to camera.)
+  // TODO: We should probably only do this if the source has in fact changed.
+  if (NS_FAILED((rv = mMedia->UpdateMediaPipelines()))) {
+    CSFLogError(LOGTAG, "Error Updating MediaPipelines");
+    return rv;
   }
 
   return NS_OK;