Bug 1601034 - Add a ProcessedMediaTrack layer in MediaPipelineTransmit to handle replaceTrack of ended tracks. r=dminor,padenot

Before this patch, if a send audio MediaStreamTrack ended, we ended up not
sending anything over the network. If replaceTrack() at that point replaced the
ended track with a live one, we'd start sending data again, but the RTP stream
would continue from where the previous track ended.

A gap in the audio like that would confuse a receiver's *video* jitter buffer,
because it tries to sync to an audio track that just showed a massive amount of
"jitter" (the receiver can't tell such a gap apart from network jitter).

This patch fixes that by adding a track layer in MediaPipelineTransmit that
remains active for as long as the MediaPipeline is active. If the send audio
MediaStreamTrack ends, we keep sending silence over the network, which the
receiver can handle. If the track is later replaced, the receiver simply sees
real audio again instead of silence and continues gracefully.
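
(Illustration only, not part of the commit: a minimal, self-contained sketch of
the idea. The names SendTrack, InputTrack and Pull are invented for this toy
model; the actual patch builds the layer from a ProcessedMediaTrack created
with CreateForwardedInputTrack() plus a MediaInputPort, as the diff below
shows.)

// Toy model of the persistent "send track" layer: while the input is live it
// forwards that input's data; once the input ends (or is missing) it emits
// silence, so the downstream consumer never sees a gap. replaceTrack() only
// swaps the input; the send layer and its timeline carry on uninterrupted.
#include <cstdio>
#include <memory>
#include <vector>

struct InputTrack {        // stands in for the DOM MediaStreamTrack's track
  bool ended = false;
  std::vector<float> Pull(size_t aFrames) {
    return std::vector<float>(aFrames, 0.25f);  // pretend audio samples
  }
};

class SendTrack {          // stands in for the new send-track layer
 public:
  void ConnectInput(std::shared_ptr<InputTrack> aInput) {
    mInput = std::move(aInput);  // what replaceTrack() boils down to here
  }
  std::vector<float> Pull(size_t aFrames) {
    if (mInput && !mInput->ended) {
      return mInput->Pull(aFrames);            // live input: forward it
    }
    return std::vector<float>(aFrames, 0.0f);  // ended input: silence
  }
 private:
  std::shared_ptr<InputTrack> mInput;
};

int main() {
  auto first = std::make_shared<InputTrack>();
  SendTrack send;
  send.ConnectInput(first);
  std::printf("live:     %f\n", send.Pull(4)[0]);     // 0.25 (real data)
  first->ended = true;                                // the sending track ends
  std::printf("ended:    %f\n", send.Pull(4)[0]);     // 0.00 (silence, no gap)
  send.ConnectInput(std::make_shared<InputTrack>());  // replaceTrack()
  std::printf("replaced: %f\n", send.Pull(4)[0]);     // 0.25 (real data again)
  return 0;
}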

Differential Revision: https://phabricator.services.mozilla.com/D56619

--HG--
extra : moz-landing-system : lando
Andreas Pehrson 2019-12-12 15:23:43 +00:00
Parent 7b36ee37f3
Commit 02224ebfcd
2 changed files with 106 additions and 50 deletions

View file

@@ -711,11 +711,11 @@ class MediaPipelineTransmit::PipelineListener
->SendVideoFrame(aVideoFrame);
}
void SetTrackEnabled(MediaStreamTrack* aTrack, bool aEnabled);
// Implement MediaTrackListener
void NotifyQueuedChanges(MediaTrackGraph* aGraph, TrackTime aOffset,
const MediaSegment& aQueuedMedia) override;
void NotifyEnabledStateChanged(MediaTrackGraph* aGraph,
bool aEnabled) override;
// Implement DirectMediaTrackListener
void NotifyRealtimeTrackData(MediaTrackGraph* aGraph, TrackTime aOffset,
@@ -740,6 +740,29 @@ class MediaPipelineTransmit::PipelineListener
bool mDirectConnect;
};
// MediaStreamTrackConsumer inherits from SupportsWeakPtr, which is
// main-thread-only.
class MediaPipelineTransmit::PipelineListenerTrackConsumer
: public MediaStreamTrackConsumer {
virtual ~PipelineListenerTrackConsumer() { MOZ_ASSERT(NS_IsMainThread()); }
const RefPtr<PipelineListener> mListener;
public:
NS_INLINE_DECL_REFCOUNTING(PipelineListenerTrackConsumer)
explicit PipelineListenerTrackConsumer(RefPtr<PipelineListener> aListener)
: mListener(std::move(aListener)) {
MOZ_ASSERT(NS_IsMainThread());
}
// Implement MediaStreamTrackConsumer
void NotifyEnabledChanged(MediaStreamTrack* aTrack, bool aEnabled) override {
MOZ_ASSERT(NS_IsMainThread());
mListener->SetTrackEnabled(aTrack, aEnabled);
}
};
// Implements VideoConverterListener for MediaPipeline.
//
// We pass converted frames on to MediaPipelineTransmit::PipelineListener
@@ -786,6 +809,10 @@ MediaPipelineTransmit::MediaPipelineTransmit(
std::move(aConduit)),
mIsVideo(aIsVideo),
mListener(new PipelineListener(mConduit)),
mTrackConsumer(
MakeAndAddRef<nsMainThreadPtrHolder<PipelineListenerTrackConsumer>>(
"MediaPipelineTransmit::mTrackConsumer",
MakeAndAddRef<PipelineListenerTrackConsumer>(mListener))),
mFeeder(aIsVideo ? MakeAndAddRef<VideoFrameFeeder>(mListener)
: nullptr), // For video we send frames to an
// async VideoFrameConverter that
@@ -843,23 +870,22 @@ void MediaPipelineTransmit::SetDescription() {
void MediaPipelineTransmit::Stop() {
ASSERT_ON_THREAD(mMainThread);
if (!mDomTrack || !mTransmitting) {
if (!mTransmitting) {
return;
}
if (!mSendTrack) {
return;
}
mTransmitting = false;
if (mDomTrack->AsAudioStreamTrack()) {
mDomTrack->RemoveDirectListener(mListener);
mDomTrack->RemoveListener(mListener);
} else if (mDomTrack->AsVideoStreamTrack()) {
mDomTrack->RemoveDirectListener(mListener);
mDomTrack->RemoveListener(mListener);
} else {
MOZ_ASSERT(false, "Unknown track type");
}
mConduit->StopTransmitting();
mSendTrack->Suspend();
if (mSendTrack->mType == MediaSegment::VIDEO) {
mSendTrack->RemoveDirectListener(mListener);
}
mSendTrack->RemoveListener(mListener);
}
bool MediaPipelineTransmit::Transmitting() const {
@@ -871,12 +897,15 @@ bool MediaPipelineTransmit::Transmitting() const {
void MediaPipelineTransmit::Start() {
ASSERT_ON_THREAD(mMainThread);
if (!mDomTrack || mTransmitting) {
if (mTransmitting) {
return;
}
if (!mSendTrack) {
return;
}
mTransmitting = true;
mConduit->StartTransmitting();
// TODO(ekr@rtfm.com): Check for errors
@@ -885,30 +914,12 @@ void MediaPipelineTransmit::Start() {
("Attaching pipeline to track %p conduit type=%s", this,
(mConduit->type() == MediaSessionConduit::AUDIO ? "audio" : "video")));
#if !defined(MOZILLA_EXTERNAL_LINKAGE)
// With full duplex we don't risk having audio come in late to the MTG
// so we won't need a direct listener.
const bool enableDirectListener =
!Preferences::GetBool("media.navigator.audio.full_duplex", false);
#else
const bool enableDirectListener = true;
#endif
mSendTrack->Resume();
if (mDomTrack->AsAudioStreamTrack()) {
if (enableDirectListener) {
// Register the Listener directly with the source if we can.
// We also register it as a non-direct listener so we fall back to that
// if installing the direct listener fails. As a direct listener we get
// access to direct unqueued (and not resampled) data.
mDomTrack->AddDirectListener(mListener);
}
mDomTrack->AddListener(mListener);
} else if (mDomTrack->AsVideoStreamTrack()) {
mDomTrack->AddDirectListener(mListener);
mDomTrack->AddListener(mListener);
} else {
MOZ_ASSERT(false, "Unknown track type");
if (mSendTrack->mType == MediaSegment::VIDEO) {
mSendTrack->AddDirectListener(mListener);
}
mSendTrack->AddListener(mListener);
}
bool MediaPipelineTransmit::IsVideo() const { return mIsVideo; }
@@ -947,7 +958,19 @@ void MediaPipelineTransmit::UpdateSinkIdentity_m(
void MediaPipelineTransmit::DetachMedia() {
ASSERT_ON_THREAD(mMainThread);
MOZ_ASSERT(!mTransmitting);
if (mDomTrack) {
mDomTrack->RemoveConsumer(mTrackConsumer);
mDomTrack = nullptr;
}
if (mSendPort) {
mSendPort->Destroy();
mSendPort = nullptr;
}
if (mSendTrack) {
mSendTrack->Destroy();
mSendTrack = nullptr;
}
// Let the listener be destroyed with the pipeline (or later).
}
@@ -959,7 +982,8 @@ void MediaPipelineTransmit::TransportReady_s() {
}
nsresult MediaPipelineTransmit::SetTrack(RefPtr<MediaStreamTrack> aDomTrack) {
// MainThread, checked in calls we make
MOZ_ASSERT(NS_IsMainThread());
if (aDomTrack) {
nsString nsTrackId;
aDomTrack->GetId(nsTrackId);
@@ -971,15 +995,35 @@ nsresult MediaPipelineTransmit::SetTrack(RefPtr<MediaStreamTrack> aDomTrack) {
(mConduit->type() == MediaSessionConduit::AUDIO ? "audio" : "video")));
}
RefPtr<dom::MediaStreamTrack> oldTrack = mDomTrack;
bool wasTransmitting = oldTrack && mTransmitting;
Stop();
if (mDomTrack) {
mDomTrack->RemoveConsumer(mTrackConsumer);
}
if (mSendPort) {
mSendPort->Destroy();
mSendPort = nullptr;
}
mDomTrack = std::move(aDomTrack);
SetDescription();
if (wasTransmitting) {
Start();
if (mDomTrack) {
if (!mDomTrack->Ended()) {
if (!mSendTrack) {
// Create the send track only once; when the first live track is set.
MOZ_ASSERT(!mTransmitting);
mSendTrack = mDomTrack->Graph()->CreateForwardedInputTrack(
mDomTrack->GetTrack()->mType);
mSendTrack->QueueSetAutoend(false);
mSendTrack->Suspend(); // Suspended while not transmitting.
}
mSendPort = mSendTrack->AllocateInputPort(mDomTrack->GetTrack());
}
mDomTrack->AddConsumer(mTrackConsumer);
if (mConverter) {
mConverter->SetTrackEnabled(mDomTrack->Enabled());
}
}
return NS_OK;
}
@@ -1098,11 +1142,13 @@ void MediaPipelineTransmit::PipelineListener::NotifyQueuedChanges(
NewData(aQueuedMedia, rate);
}
void MediaPipelineTransmit::PipelineListener::NotifyEnabledStateChanged(
MediaTrackGraph* aGraph, bool aEnabled) {
void MediaPipelineTransmit::PipelineListener::SetTrackEnabled(
MediaStreamTrack* aTrack, bool aEnabled) {
MOZ_ASSERT(NS_IsMainThread());
if (mConduit->type() != MediaSessionConduit::VIDEO) {
return;
}
MOZ_ASSERT(mConverter);
mConverter->SetTrackEnabled(aEnabled);
}

View file

@@ -32,10 +32,13 @@
class nsIPrincipal;
namespace mozilla {
class AudioProxyThread;
class MediaInputPort;
class MediaPipelineFilter;
class MediaTransportHandler;
class PeerIdentity;
class AudioProxyThread;
class ProcessedMediaTrack;
class SourceMediaTrack;
class VideoFrameConverter;
namespace dom {
@@ -43,8 +46,6 @@ class MediaStreamTrack;
struct RTCRTPContributingSourceStats;
} // namespace dom
class SourceMediaTrack;
// A class that represents the pipeline of audio and video
// The dataflow looks like:
//
@@ -307,6 +308,7 @@ class MediaPipelineTransmit : public MediaPipeline {
// Separate classes to allow ref counting
class PipelineListener;
class PipelineListenerTrackConsumer;
class VideoFrameFeeder;
protected:
@@ -317,10 +319,18 @@ class MediaPipelineTransmit : public MediaPipeline {
private:
const bool mIsVideo;
const RefPtr<PipelineListener> mListener;
// Listens for changes in enabled state on the attached MediaStreamTrack, and
// notifies mListener.
const nsMainThreadPtrHandle<PipelineListenerTrackConsumer> mTrackConsumer;
const RefPtr<VideoFrameFeeder> mFeeder;
RefPtr<AudioProxyThread> mAudioProcessing;
RefPtr<VideoFrameConverter> mConverter;
RefPtr<dom::MediaStreamTrack> mDomTrack;
// Input port connecting mDomTrack's MediaTrack to mSendTrack.
RefPtr<MediaInputPort> mSendPort;
// MediaTrack that we send over the network. This allows changing mDomTrack.
RefPtr<ProcessedMediaTrack> mSendTrack;
// True if we're actively transmitting data to the network. Main thread only.
bool mTransmitting;
};
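
(Reviewer orientation, not part of the commit: a rough usage sketch of how the
new members are exercised, based only on the calls visible in this diff. The
variables domTrack and otherTrack are hypothetical, and pipeline construction
is elided, so this is not compilable on its own.)

// Lifecycle of the new send-track layer, as implied by the hunks above.
RefPtr<MediaPipelineTransmit> pipeline = /* created elsewhere */;

pipeline->SetTrack(domTrack);    // lazily creates mSendTrack (suspended) and
                                 // connects domTrack's track via mSendPort
pipeline->Start();               // mSendTrack->Resume(), listener attached
pipeline->SetTrack(otherTrack);  // replaceTrack(): only mSendPort is rebuilt;
                                 // mSendTrack keeps feeding the conduit
pipeline->Stop();                // mSendTrack->Suspend(), listener removed
pipeline->DetachMedia();         // destroys mSendPort and mSendTrack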