Bug 1290948 - Part 5: TransceiverImpl and some major refactoring. r+drno r=drno

MozReview-Commit-ID: 3IBAch7xVNG

--HG--
extra : rebase_source : 7379357abe05ef043260a13faf2bcebbcd2b1d39
This commit is contained in:
Byron Campen [:bwc] 2017-08-23 16:12:43 -05:00
Родитель 46f6fa03a6
Коммит 7a8dcf7887
15 изменённых файлов: 2324 добавлений и 3095 удалений

Просмотреть файл

@ -243,7 +243,8 @@ class TestAgent {
TestAgent() :
audio_config_(109, "opus", 48000, 960, 2, 64000, false),
audio_conduit_(mozilla::AudioSessionConduit::Create()),
audio_pipeline_() {
audio_pipeline_(),
use_bundle_(false) {
}
static void ConnectRtp(TestAgent *client, TestAgent *server) {
@ -261,38 +262,24 @@ class TestAgent {
server->bundle_transport_);
}
virtual void CreatePipelines_s(bool aIsRtcpMux) = 0;
void Start() {
MOZ_MTLOG(ML_DEBUG, "Starting");
audio_pipeline_->Init();
}
// Intentionally empty. Stop() dispatches this to the STS thread with
// SyncRunnable::DispatchToThread purely as a synchronization barrier, so
// Stop() does not return until pending STS work has drained.
void StopInt() {
}
virtual void CreatePipeline(bool aIsRtcpMux) = 0;
void Stop() {
MOZ_MTLOG(ML_DEBUG, "Stopping");
if (audio_pipeline_)
audio_pipeline_->ShutdownMedia_m();
mozilla::SyncRunnable::DispatchToThread(
test_utils->sts_target(),
WrapRunnable(this, &TestAgent::StopInt));
audio_pipeline_->Stop();
}
// Transport teardown; the "_s" suffix follows this file's convention of
// STS-thread-only functions. Shuts down the RTP, RTCP and bundle transport
// flows, then detaches the pipeline from its transport.
// audio_pipeline_ may be null if no pipeline was ever created.
void Shutdown_s() {
audio_rtp_transport_.Shutdown();
audio_rtcp_transport_.Shutdown();
bundle_transport_.Shutdown();
if (audio_pipeline_)
audio_pipeline_->DetachTransport_s();
}
void Shutdown() {
if (audio_pipeline_)
audio_pipeline_->ShutdownMedia_m();
audio_pipeline_->Shutdown_m();
if (audio_stream_track_)
audio_stream_track_->Stop();
@ -329,6 +316,11 @@ class TestAgent {
return audio_pipeline_->rtcp_packets_received();
}
// Records whether pipelines created later should ride the shared bundle
// transport instead of the dedicated RTP/RTCP flows (use_bundle_ is
// consulted where the TransportFlows are selected: rtp is replaced by
// bundle_transport_.flow_ and rtcp becomes null, i.e. rtcp-mux).
void SetUsingBundle(bool use_bundle) {
use_bundle_ = use_bundle;
}
protected:
mozilla::AudioCodecConfig audio_config_;
RefPtr<mozilla::MediaSessionConduit> audio_conduit_;
@ -340,11 +332,12 @@ class TestAgent {
TransportInfo audio_rtp_transport_;
TransportInfo audio_rtcp_transport_;
TransportInfo bundle_transport_;
bool use_bundle_;
};
class TestAgentSend : public TestAgent {
public:
TestAgentSend() : use_bundle_(false) {
TestAgentSend() {
mozilla::MediaConduitErrorCode err =
static_cast<mozilla::AudioSessionConduit *>(audio_conduit_.get())->
ConfigureSendMediaCodec(&audio_config_);
@ -353,7 +346,7 @@ class TestAgentSend : public TestAgent {
audio_stream_track_ = new FakeAudioStreamTrack();
}
virtual void CreatePipelines_s(bool aIsRtcpMux) {
virtual void CreatePipeline(bool aIsRtcpMux) {
std::string test_pc;
@ -361,6 +354,19 @@ class TestAgentSend : public TestAgent {
ASSERT_FALSE(audio_rtcp_transport_.flow_);
}
RefPtr<MediaPipelineTransmit> audio_pipeline =
new mozilla::MediaPipelineTransmit(
test_pc,
nullptr,
test_utils->sts_target(),
false,
audio_stream_track_.get(),
audio_conduit_);
audio_pipeline->Start();
audio_pipeline_ = audio_pipeline;
RefPtr<TransportFlow> rtp(audio_rtp_transport_.flow_);
RefPtr<TransportFlow> rtcp(audio_rtcp_transport_.flow_);
@ -369,25 +375,9 @@ class TestAgentSend : public TestAgent {
rtcp = nullptr;
}
audio_pipeline_ = new mozilla::MediaPipelineTransmit(
test_pc,
nullptr,
test_utils->sts_target(),
audio_stream_track_.get(),
"audio_track_fake_uuid",
1,
audio_conduit_,
rtp,
rtcp,
nsAutoPtr<MediaPipelineFilter>());
audio_pipeline_->UpdateTransport_m(
rtp, rtcp, nsAutoPtr<MediaPipelineFilter>(nullptr));
}
void SetUsingBundle(bool use_bundle) {
use_bundle_ = use_bundle;
}
private:
bool use_bundle_;
};
@ -404,8 +394,8 @@ class TestAgentReceive : public TestAgent {
EXPECT_EQ(mozilla::kMediaConduitNoError, err);
}
virtual void CreatePipelines_s(bool aIsRtcpMux) {
std::string test_pc;
virtual void CreatePipeline(bool aIsRtcpMux) {
std::string test_pc;
if (aIsRtcpMux) {
ASSERT_FALSE(audio_rtcp_transport_.flow_);
@ -415,11 +405,20 @@ class TestAgentReceive : public TestAgent {
test_pc,
nullptr,
test_utils->sts_target(),
new FakeSourceMediaStream(), "audio_track_fake_uuid", 1, 1,
static_cast<mozilla::AudioSessionConduit *>(audio_conduit_.get()),
audio_rtp_transport_.flow_,
audio_rtcp_transport_.flow_,
bundle_filter_);
nullptr);
audio_pipeline_->Start();
RefPtr<TransportFlow> rtp(audio_rtp_transport_.flow_);
RefPtr<TransportFlow> rtcp(audio_rtcp_transport_.flow_);
if (use_bundle_) {
rtp = bundle_transport_.flow_;
rtcp = nullptr;
}
audio_pipeline_->UpdateTransport_m(rtp, rtcp, bundle_filter_);
}
void SetBundleFilter(nsAutoPtr<MediaPipelineFilter> filter) {
@ -428,8 +427,7 @@ class TestAgentReceive : public TestAgent {
void UpdateFilter_s(
nsAutoPtr<MediaPipelineFilter> filter) {
audio_pipeline_->UpdateTransport_s(1,
audio_rtp_transport_.flow_,
audio_pipeline_->UpdateTransport_s(audio_rtp_transport_.flow_,
audio_rtcp_transport_.flow_,
filter);
}
@ -442,8 +440,6 @@ class TestAgentReceive : public TestAgent {
class MediaPipelineTest : public ::testing::Test {
public:
// Tear down both test agents: halt media flow first, then release
// tracks/pipelines/transports via Shutdown().
~MediaPipelineTest() {
p1_.Stop();
p2_.Stop();
p1_.Shutdown();
p2_.Shutdown();
}
@ -494,16 +490,8 @@ class MediaPipelineTest : public ::testing::Test {
// Setup transport flows
InitTransports(aIsRtcpMux);
mozilla::SyncRunnable::DispatchToThread(
test_utils->sts_target(),
WrapRunnable(&p1_, &TestAgent::CreatePipelines_s, aIsRtcpMux), NS_DISPATCH_SYNC);
mozilla::SyncRunnable::DispatchToThread(
test_utils->sts_target(),
WrapRunnable(&p2_, &TestAgent::CreatePipelines_s, aIsRtcpMux), NS_DISPATCH_SYNC);
p2_.Start();
p1_.Start();
p1_.CreatePipeline(aIsRtcpMux);
p2_.CreatePipeline(aIsRtcpMux);
if (bundle) {
PR_Sleep(ms_until_filter_update);

Просмотреть файл

@ -26,6 +26,7 @@
#include "MediaStreamVideoSink.h"
#include "VideoUtils.h"
#include "VideoStreamTrack.h"
#include "MediaEngine.h"
#include "nsError.h"
#include "AudioSegment.h"
@ -565,19 +566,12 @@ MediaPipeline::MediaPipeline(const std::string& pc,
Direction direction,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter)
RefPtr<MediaSessionConduit> conduit)
: direction_(direction),
track_id_(track_id),
level_(level),
level_(0),
conduit_(conduit),
rtp_(rtp_transport, rtcp_transport ? RTP : MUX),
rtcp_(rtcp_transport ? rtcp_transport : rtp_transport,
rtcp_transport ? RTCP : MUX),
rtp_(nullptr, RTP),
rtcp_(nullptr, RTCP),
main_thread_(main_thread),
sts_thread_(sts_thread),
rtp_packets_sent_(0),
@ -588,24 +582,9 @@ MediaPipeline::MediaPipeline(const std::string& pc,
rtp_bytes_received_(0),
pc_(pc),
description_(),
filter_(filter),
rtp_parser_(webrtc::RtpHeaderParser::Create()){
// To indicate rtcp-mux rtcp_transport should be nullptr.
// Therefore it's an error to send in the same flow for
// both rtp and rtcp.
MOZ_ASSERT(rtp_transport != rtcp_transport);
// PipelineTransport() will access this->sts_thread_; moved here for safety
transport_ = new PipelineTransport(this);
}
MediaPipeline::~MediaPipeline() {
ASSERT_ON_THREAD(main_thread_);
CSFLogInfo(LOGTAG, "Destroying MediaPipeline: %s", description_.c_str());
}
nsresult MediaPipeline::Init() {
ASSERT_ON_THREAD(main_thread_);
packet_dumper_ = new PacketDumper(pc_);
if (direction_ == RECEIVE) {
@ -613,26 +592,30 @@ nsresult MediaPipeline::Init() {
} else {
conduit_->SetTransmitterTransport(transport_);
}
}
MediaPipeline::~MediaPipeline() {
CSFLogInfo(LOGTAG, "Destroying MediaPipeline: %s", description_.c_str());
// MediaSessionConduit insists that it be released on main.
RUN_ON_THREAD(main_thread_, WrapRelease(conduit_.forget()),
NS_DISPATCH_NORMAL);
}
void
MediaPipeline::Shutdown_m()
{
CSFLogInfo(LOGTAG, "%s in %s", description_.c_str(), __FUNCTION__);
Stop();
DetachMedia();
RUN_ON_THREAD(sts_thread_,
WrapRunnable(
RefPtr<MediaPipeline>(this),
&MediaPipeline::Init_s),
&MediaPipeline::DetachTransport_s),
NS_DISPATCH_NORMAL);
return NS_OK;
}
nsresult MediaPipeline::Init_s() {
ASSERT_ON_THREAD(sts_thread_);
return AttachTransport_s();
}
// Disconnect us from the transport so that we can cleanly destruct the
// pipeline on the main thread. ShutdownMedia_m() must have already been
// called
void
MediaPipeline::DetachTransport_s()
{
@ -672,16 +655,14 @@ MediaPipeline::AttachTransport_s()
}
void
MediaPipeline::UpdateTransport_m(int level,
RefPtr<TransportFlow> rtp_transport,
MediaPipeline::UpdateTransport_m(RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter)
{
RUN_ON_THREAD(sts_thread_,
WrapRunnable(
this,
RefPtr<MediaPipeline>(this),
&MediaPipeline::UpdateTransport_s,
level,
rtp_transport,
rtcp_transport,
filter),
@ -689,8 +670,7 @@ MediaPipeline::UpdateTransport_m(int level,
}
void
MediaPipeline::UpdateTransport_s(int level,
RefPtr<TransportFlow> rtp_transport,
MediaPipeline::UpdateTransport_s(RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter)
{
@ -706,13 +686,13 @@ MediaPipeline::UpdateTransport_s(int level,
transport_->Detach();
rtp_.Detach();
rtcp_.Detach();
rtp_ = TransportInfo(rtp_transport, rtcp_mux ? MUX : RTP);
rtcp_ = TransportInfo(rtcp_transport, rtcp_mux ? MUX : RTCP);
AttachTransport_s();
if (rtp_transport && rtcp_transport) {
rtp_ = TransportInfo(rtp_transport, rtcp_mux ? MUX : RTP);
rtcp_ = TransportInfo(rtcp_transport, rtcp_mux ? MUX : RTCP);
AttachTransport_s();
}
}
level_ = level;
if (filter_ && filter) {
// Use the new filter, but don't forget any remote SSRCs that we've learned
// by receiving traffic.
@ -888,27 +868,29 @@ nsresult MediaPipeline::TransportReady_s(TransportInfo &info) {
return NS_ERROR_FAILURE;
}
CSFLogInfo(LOGTAG, "Listening for %s packets received on %p",
ToString(info.type_), dtls->downward());
if (direction_ == RECEIVE) {
CSFLogInfo(LOGTAG, "Listening for %s packets received on %p",
ToString(info.type_), dtls->downward());
switch (info.type_) {
case RTP:
dtls->downward()->SignalPacketReceived.connect(
this,
&MediaPipeline::RtpPacketReceived);
break;
case RTCP:
dtls->downward()->SignalPacketReceived.connect(
this,
&MediaPipeline::RtcpPacketReceived);
break;
case MUX:
dtls->downward()->SignalPacketReceived.connect(
this,
&MediaPipeline::PacketReceived);
break;
default:
MOZ_CRASH();
switch (info.type_) {
case RTP:
dtls->downward()->SignalPacketReceived.connect(
this,
&MediaPipeline::RtpPacketReceived);
break;
case RTCP:
dtls->downward()->SignalPacketReceived.connect(
this,
&MediaPipeline::RtcpPacketReceived);
break;
case MUX:
dtls->downward()->SignalPacketReceived.connect(
this,
&MediaPipeline::PacketReceived);
break;
default:
MOZ_CRASH();
}
}
info.state_ = MP_OPEN;
@ -1013,6 +995,10 @@ void MediaPipeline::increment_rtcp_packets_received() {
void MediaPipeline::RtpPacketReceived(TransportLayer *layer,
const unsigned char *data,
size_t len) {
if (direction_ == TRANSMIT) {
return;
}
if (!transport_->pipeline()) {
CSFLogError(LOGTAG, "Discarding incoming packet; transport disconnected");
return;
@ -1036,10 +1022,6 @@ void MediaPipeline::RtpPacketReceived(TransportLayer *layer,
// This should never happen.
MOZ_ASSERT(rtp_.recv_srtp_);
if (direction_ == TRANSMIT) {
return;
}
if (!len) {
return;
}
@ -1162,14 +1144,12 @@ void MediaPipeline::RtcpPacketReceived(TransportLayer *layer,
return;
}
// We do not filter RTCP for send pipelines, since the webrtc.org code for
// We do not filter receiver reports, since the webrtc.org code for
// senders already has logic to ignore RRs that do not apply.
// TODO bug 1279153: remove SR check for reduced size RTCP
if (filter_ && direction_ == RECEIVE) {
if (!filter_->FilterSenderReport(data, len)) {
CSFLogWarn(LOGTAG, "Dropping incoming RTCP packet; filtered out");
return;
}
if (filter_ && !filter_->FilterSenderReport(data, len)) {
CSFLogWarn(LOGTAG, "Dropping incoming RTCP packet; filtered out");
return;
}
packet_dumper_->Dump(
@ -1311,14 +1291,14 @@ public:
VideoType aVideoType,
uint64_t aCaptureTime)
{
MOZ_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
MOZ_RELEASE_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
static_cast<VideoSessionConduit*>(conduit_.get())->SendVideoFrame(
aVideoFrame, aVideoFrameLength, aWidth, aHeight, aVideoType, aCaptureTime);
}
void OnVideoFrameConverted(webrtc::VideoFrame& aVideoFrame)
{
MOZ_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
MOZ_RELEASE_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
static_cast<VideoSessionConduit*>(conduit_.get())->SendVideoFrame(aVideoFrame);
}
@ -1433,18 +1413,16 @@ MediaPipelineTransmit::MediaPipelineTransmit(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
bool is_video,
dom::MediaStreamTrack* domtrack,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, track_id, level,
conduit, rtp_transport, rtcp_transport, filter),
RefPtr<MediaSessionConduit> conduit) :
MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, conduit),
listener_(new PipelineListener(conduit)),
domtrack_(domtrack)
is_video_(is_video),
domtrack_(domtrack),
transmitting_(false)
{
SetDescription();
if (!IsVideo()) {
audio_processing_ = MakeAndAddRef<AudioProxyThread>(static_cast<AudioSessionConduit*>(conduit.get()));
listener_->SetAudioProxy(audio_processing_);
@ -1467,22 +1445,58 @@ MediaPipelineTransmit::~MediaPipelineTransmit()
if (feeder_) {
feeder_->Detach();
}
MOZ_ASSERT(!domtrack_);
}
nsresult MediaPipelineTransmit::Init() {
AttachToTrack(track_id_);
return MediaPipeline::Init();
}
void MediaPipelineTransmit::AttachToTrack(const std::string& track_id) {
ASSERT_ON_THREAD(main_thread_);
// Rebuilds the human-readable description_ used in log output, of the form
// "<pc>| Transmit audio[<track id>]" (or "video"), or "... no track]" when
// no dom track is currently attached. Called whenever domtrack_ changes.
void MediaPipelineTransmit::SetDescription() {
description_ = pc_ + "| ";
description_ += conduit_->type() == MediaSessionConduit::AUDIO ?
"Transmit audio[" : "Transmit video[";
if (!domtrack_) {
description_ += "no track]";
return;
}
// Convert the UTF-16 dom track id to UTF-8 for the log string.
nsString nsTrackId;
domtrack_->GetId(nsTrackId);
std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
description_ += track_id;
description_ += "]";
}
// Main-thread stop: detach the pipeline listener from the dom track and
// stop the conduit's transmit side. Idempotent — returns early when there
// is no track or we are not currently transmitting.
void MediaPipelineTransmit::Stop() {
ASSERT_ON_THREAD(main_thread_);
if (!domtrack_ || !transmitting_) {
return;
}
transmitting_ = false;
// Audio tracks feed us through stream listeners; video tracks through a
// video-output sink — detach whichever applies.
if (domtrack_->AsAudioStreamTrack()) {
domtrack_->RemoveDirectListener(listener_);
domtrack_->RemoveListener(listener_);
} else if (VideoStreamTrack* video = domtrack_->AsVideoStreamTrack()) {
video->RemoveVideoOutput(listener_);
} else {
MOZ_ASSERT(false, "Unknown track type");
}
conduit_->StopTransmitting();
}
void MediaPipelineTransmit::Start() {
ASSERT_ON_THREAD(main_thread_);
if (!domtrack_ || transmitting_) {
return;
}
transmitting_ = true;
conduit_->StartTransmitting();
// TODO(ekr@rtfm.com): Check for errors
CSFLogDebug(LOGTAG, "Attaching pipeline to track %p conduit type=%s", this,
@ -1516,7 +1530,7 @@ void MediaPipelineTransmit::AttachToTrack(const std::string& track_id) {
bool
MediaPipelineTransmit::IsVideo() const
{
return !!domtrack_->AsVideoStreamTrack();
return is_video_;
}
void MediaPipelineTransmit::UpdateSinkIdentity_m(MediaStreamTrack* track,
@ -1550,17 +1564,7 @@ void
MediaPipelineTransmit::DetachMedia()
{
ASSERT_ON_THREAD(main_thread_);
if (domtrack_) {
if (domtrack_->AsAudioStreamTrack()) {
domtrack_->RemoveDirectListener(listener_);
domtrack_->RemoveListener(listener_);
} else if (VideoStreamTrack* video = domtrack_->AsVideoStreamTrack()) {
video->RemoveVideoOutput(listener_);
} else {
MOZ_ASSERT(false, "Unknown track type");
}
domtrack_ = nullptr;
}
domtrack_ = nullptr;
// Let the listener be destroyed with the pipeline (or later).
}
@ -1577,21 +1581,31 @@ nsresult MediaPipelineTransmit::TransportReady_s(TransportInfo &info) {
return NS_OK;
}
nsresult MediaPipelineTransmit::ReplaceTrack(MediaStreamTrack& domtrack) {
nsresult MediaPipelineTransmit::ReplaceTrack(RefPtr<MediaStreamTrack>& domtrack) {
// MainThread, checked in calls we make
nsString nsTrackId;
domtrack.GetId(nsTrackId);
std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
CSFLogDebug(LOGTAG, "Reattaching pipeline %s to track %p track %s conduit type: %s",
description_.c_str(), &domtrack, track_id.c_str(),
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
if (domtrack) {
nsString nsTrackId;
domtrack->GetId(nsTrackId);
std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
CSFLogDebug(LOGTAG, "Reattaching pipeline %s to track %p track %s conduit type: %s",
description_.c_str(), &domtrack, track_id.c_str(),
(conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
}
DetachMedia();
domtrack_ = &domtrack; // Detach clears it
// Unsets the track id after RemoveListener() takes effect.
listener_->UnsetTrackId(domtrack_->GraphImpl());
track_id_ = track_id;
AttachToTrack(track_id);
RefPtr<dom::MediaStreamTrack> oldTrack = domtrack_;
bool wasTransmitting = oldTrack && transmitting_;
Stop();
domtrack_ = domtrack;
SetDescription();
if (oldTrack) {
// Unsets the track id after RemoveListener() takes effect.
listener_->UnsetTrackId(oldTrack->GraphImpl());
}
if (wasTransmitting) {
Start();
}
return NS_OK;
}
@ -1889,28 +1903,6 @@ class GenericReceiveCallback : public TrackAddedCallback
RefPtr<GenericReceiveListener> listener_;
};
// Add a listener on the MSG thread using the MSG command queue: the
// ControlMessage's Run() executes on the MediaStreamGraph thread and hands
// ownership of the listener to the stream via AddListenerImpl().
// NOTE(review): if the stream has no GraphImpl() the listener is silently
// dropped — callers receive no error.
static void AddListener(MediaStream* source, MediaStreamListener* listener) {
class Message : public ControlMessage {
public:
Message(MediaStream* stream, MediaStreamListener* listener)
: ControlMessage(stream),
listener_(listener) {}
virtual void Run() override {
mStream->AddListenerImpl(listener_.forget());
}
private:
RefPtr<MediaStreamListener> listener_;
};
MOZ_ASSERT(listener);
if (source->GraphImpl()) {
source->GraphImpl()->AppendMessage(MakeUnique<Message>(source, listener));
}
}
class GenericReceiveListener : public MediaStreamListener
{
public:
@ -1919,18 +1911,56 @@ class GenericReceiveListener : public MediaStreamListener
track_id_(track_id),
played_ticks_(0),
last_log_(0),
principal_handle_(PRINCIPAL_HANDLE_NONE) {}
principal_handle_(PRINCIPAL_HANDLE_NONE),
listening_(false)
{
MOZ_ASSERT(source);
}
virtual ~GenericReceiveListener() {}
void AddSelf()
{
AddListener(source_, this);
if (!listening_) {
listening_ = true;
source_->AddListener(this);
}
}
// Detach this listener from the source stream. The listening_ flag makes
// this idempotent: a second RemoveSelf() (or one before AddSelf()) is a
// no-op.
void RemoveSelf()
{
if (listening_) {
listening_ = false;
source_->RemoveListener(this);
}
}
void EndTrack()
{
source_->EndTrack(track_id_);
CSFLogDebug(LOGTAG, "GenericReceiveListener ending track");
// We do this on MSG to avoid it racing against StartTrack.
class Message : public ControlMessage
{
public:
Message(SourceMediaStream* stream,
TrackID track_id)
: ControlMessage(stream),
source_(stream),
track_id_(track_id)
{}
void Run() override {
source_->EndTrack(track_id_);
}
RefPtr<SourceMediaStream> source_;
const TrackID track_id_;
};
source_->GraphImpl()->AppendMessage(MakeUnique<Message>(source_, track_id_));
// This breaks the cycle with source_
source_->RemoveListener(this);
}
// Must be called on the main thread
@ -1965,45 +1995,35 @@ class GenericReceiveListener : public MediaStreamListener
}
protected:
SourceMediaStream *source_;
RefPtr<SourceMediaStream> source_;
const TrackID track_id_;
TrackTicks played_ticks_;
TrackTicks last_log_; // played_ticks_ when we last logged
PrincipalHandle principal_handle_;
bool listening_;
};
MediaPipelineReceive::MediaPipelineReceive(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream *stream,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipeline(pc, RECEIVE, main_thread, sts_thread,
track_id, level, conduit, rtp_transport,
rtcp_transport, filter),
stream_(stream),
RefPtr<MediaSessionConduit> conduit) :
MediaPipeline(pc, RECEIVE, main_thread, sts_thread, conduit),
segments_added_(0)
{
MOZ_ASSERT(stream_);
}
MediaPipelineReceive::~MediaPipelineReceive()
{
MOZ_ASSERT(!stream_); // Check that we have shut down already.
}
class MediaPipelineReceiveAudio::PipelineListener
: public GenericReceiveListener
{
public:
PipelineListener(SourceMediaStream * source, TrackID track_id,
PipelineListener(SourceMediaStream * source,
const RefPtr<MediaSessionConduit>& conduit)
: GenericReceiveListener(source, track_id),
: GenericReceiveListener(source, kAudioTrack),
conduit_(conduit)
{
}
@ -2119,57 +2139,53 @@ MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream* stream,
const std::string& media_stream_track_id,
TrackID numeric_track_id,
int level,
RefPtr<AudioSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, media_stream_track_id, level, conduit,
rtp_transport, rtcp_transport, filter),
listener_(new PipelineListener(stream, numeric_track_id, conduit))
{}
SourceMediaStream* aStream) :
MediaPipelineReceive(pc, main_thread, sts_thread, conduit),
listener_(aStream ? new PipelineListener(aStream, conduit_) : nullptr)
{
description_ = pc_ + "| Receive audio";
}
void MediaPipelineReceiveAudio::DetachMedia()
{
ASSERT_ON_THREAD(main_thread_);
if (stream_ && listener_) {
if (listener_) {
listener_->EndTrack();
if (stream_->GraphImpl()) {
stream_->RemoveListener(listener_);
}
stream_ = nullptr;
listener_ = nullptr;
}
}
nsresult MediaPipelineReceiveAudio::Init()
{
ASSERT_ON_THREAD(main_thread_);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
description_ = pc_ + "| Receive audio[";
description_ += track_id_;
description_ += "]";
listener_->AddSelf();
return MediaPipelineReceive::Init();
}
void MediaPipelineReceiveAudio::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
{
listener_->SetPrincipalHandle_m(principal_handle);
if (listener_) {
listener_->SetPrincipalHandle_m(principal_handle);
}
}
// Start receiving: begin the conduit's receive side, then attach the
// stream listener. listener_ is null when the pipeline was constructed
// without a SourceMediaStream (see the aStream check in the constructor).
void
MediaPipelineReceiveAudio::Start()
{
conduit_->StartReceiving();
if (listener_) {
listener_->AddSelf();
}
}
// Mirror of Start(): detach the listener (if any) first, then stop the
// conduit's receive side.
void
MediaPipelineReceiveAudio::Stop()
{
if (listener_) {
listener_->RemoveSelf();
}
conduit_->StopReceiving();
}
class MediaPipelineReceiveVideo::PipelineListener
: public GenericReceiveListener {
public:
PipelineListener(SourceMediaStream* source, TrackID track_id)
: GenericReceiveListener(source, track_id)
explicit PipelineListener(SourceMediaStream* source)
: GenericReceiveListener(source, kVideoTrack)
, image_container_()
, image_()
, mutex_("Video PipelineListener")
@ -2298,20 +2314,15 @@ MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(
const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream *stream,
const std::string& media_stream_track_id,
TrackID numeric_track_id,
int level,
RefPtr<VideoSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter) :
MediaPipelineReceive(pc, main_thread, sts_thread,
stream, media_stream_track_id, level, conduit,
rtp_transport, rtcp_transport, filter),
SourceMediaStream* aStream) :
MediaPipelineReceive(pc, main_thread, sts_thread, conduit),
renderer_(new PipelineRenderer(this)),
listener_(new PipelineListener(stream, numeric_track_id))
{}
listener_(aStream ? new PipelineListener(aStream) : nullptr)
{
description_ = pc_ + "| Receive video";
conduit->AttachRenderer(renderer_);
}
void MediaPipelineReceiveVideo::DetachMedia()
{
@ -2322,33 +2333,35 @@ void MediaPipelineReceiveVideo::DetachMedia()
// avoid cycles, and the render callbacks are invoked from a different
// thread so simple null-checks would cause TSAN bugs without locks.
static_cast<VideoSessionConduit*>(conduit_.get())->DetachRenderer();
if (stream_ && listener_) {
if (listener_) {
listener_->EndTrack();
stream_->RemoveListener(listener_);
stream_ = nullptr;
listener_ = nullptr;
}
}
nsresult MediaPipelineReceiveVideo::Init() {
ASSERT_ON_THREAD(main_thread_);
CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
description_ = pc_ + "| Receive video[";
description_ += track_id_;
description_ += "]";
listener_->AddSelf();
// Always happens before we can DetachMedia()
static_cast<VideoSessionConduit *>(conduit_.get())->
AttachRenderer(renderer_);
return MediaPipelineReceive::Init();
}
void MediaPipelineReceiveVideo::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
{
listener_->SetPrincipalHandle_m(principal_handle);
if (listener_) {
listener_->SetPrincipalHandle_m(principal_handle);
}
}
// Start receiving video: start the conduit, then hook the listener up to
// the source stream. listener_ may be null when the pipeline has no
// stream (constructor only creates it when aStream is non-null).
void
MediaPipelineReceiveVideo::Start()
{
conduit_->StartReceiving();
if (listener_) {
listener_->AddSelf();
}
}
// Stop receiving video: detach the listener first, then stop the conduit.
// Symmetric with Start(), and safe to call when listener_ is null.
void
MediaPipelineReceiveVideo::Stop()
{
if (listener_) {
listener_->RemoveSelf();
}
conduit_->StopReceiving();
}
DOMHighResTimeStamp MediaPipeline::GetNow() {

Просмотреть файл

@ -12,13 +12,13 @@
#include "sigslot.h"
#include "MediaConduitInterface.h"
#include "signaling/src/media-conduit/MediaConduitInterface.h"
#include "mozilla/ReentrantMonitor.h"
#include "mozilla/Atomics.h"
#include "SrtpFlow.h"
#include "databuffer.h"
#include "runnable_utils.h"
#include "transportflow.h"
#include "mtransport/runnable_utils.h"
#include "mtransport/transportflow.h"
#include "AudioPacketizer.h"
#include "StreamTracks.h"
#include "signaling/src/peerconnection/PacketDumper.h"
@ -83,38 +83,22 @@ class MediaPipeline : public sigslot::has_slots<> {
Direction direction,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
RefPtr<MediaSessionConduit> conduit);
// Must be called on the STS thread. Must be called after ShutdownMedia_m().
void DetachTransport_s();
virtual void Start() = 0;
virtual void Stop() = 0;
virtual void DetachMedia() {}
void SetLevel(size_t level) { level_ = level; }
// Must be called on the main thread.
void ShutdownMedia_m()
{
ASSERT_ON_THREAD(main_thread_);
void Shutdown_m();
if (direction_ == RECEIVE) {
conduit_->StopReceiving();
} else {
conduit_->StopTransmitting();
}
DetachMedia();
}
virtual nsresult Init();
void UpdateTransport_m(int level,
RefPtr<TransportFlow> rtp_transport,
void UpdateTransport_m(RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
void UpdateTransport_s(int level,
RefPtr<TransportFlow> rtp_transport,
void UpdateTransport_s(RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
@ -128,8 +112,7 @@ class MediaPipeline : public sigslot::has_slots<> {
void AddRIDFilter_s(const std::string& rid);
virtual Direction direction() const { return direction_; }
virtual const std::string& trackid() const { return track_id_; }
virtual int level() const { return level_; }
int level() const { return level_; }
virtual bool IsVideo() const = 0;
bool IsDoingRtcpMux() const {
@ -213,17 +196,13 @@ class MediaPipeline : public sigslot::has_slots<> {
nsresult SendRtpRtcpPacket_s(nsAutoPtr<DataBuffer> data,
bool is_rtp);
MediaPipeline *pipeline_; // Raw pointer to avoid cycles
// Creates a cycle, which we break with Detach
RefPtr<MediaPipeline> pipeline_;
nsCOMPtr<nsIEventTarget> sts_thread_;
};
RefPtr<PipelineTransport> GetPiplelineTransport() {
return transport_;
}
protected:
virtual ~MediaPipeline();
virtual void DetachMedia() {}
nsresult AttachTransport_s();
friend class PipelineTransport;
@ -233,7 +212,6 @@ class MediaPipeline : public sigslot::has_slots<> {
transport_(flow),
state_(MP_CONNECTING),
type_(type) {
MOZ_ASSERT(flow);
}
void Detach()
@ -279,14 +257,7 @@ class MediaPipeline : public sigslot::has_slots<> {
size_t len);
Direction direction_;
std::string track_id_; // The track on the stream.
// Written on the main thread.
// Used on STS and MediaStreamGraph threads.
// Not used outside initialization in MediaPipelineTransmit
// The m-line index (starting at 0, to match convention) Atomic because
// this value is updated from STS, but read on main, and we don't want to
// bother with dispatches just to get an int occasionally.
Atomic<int> level_;
size_t level_;
RefPtr<MediaSessionConduit> conduit_; // Our conduit. Written on the main
// thread. Read on STS thread.
@ -299,8 +270,7 @@ class MediaPipeline : public sigslot::has_slots<> {
nsCOMPtr<nsIEventTarget> main_thread_;
nsCOMPtr<nsIEventTarget> sts_thread_;
// Created on Init. Referenced by the conduit and eventually
// destroyed on the STS thread.
// Created in c'tor. Referenced by the conduit.
RefPtr<PipelineTransport> transport_;
// Only safe to access from STS thread.
@ -315,11 +285,11 @@ class MediaPipeline : public sigslot::has_slots<> {
// Only safe to access from STS thread.
std::map<uint32_t, RtpCSRCStats> csrc_stats_;
// Written on Init. Read on STS thread.
// Written in c'tor. Read on STS thread.
std::string pc_;
std::string description_;
// Written on Init, all following accesses are on the STS thread.
// Written in c'tor, all following accesses are on the STS thread.
nsAutoPtr<MediaPipelineFilter> filter_;
nsAutoPtr<webrtc::RtpHeaderParser> rtp_parser_;
@ -328,9 +298,10 @@ class MediaPipeline : public sigslot::has_slots<> {
private:
// Gets the current time as a DOMHighResTimeStamp
static DOMHighResTimeStamp GetNow();
nsresult Init_s();
bool IsRtp(const unsigned char *data, size_t len);
// Must be called on the STS thread. Must be called after DetachMedia().
void DetachTransport_s();
};
class ConduitDeleteEvent: public Runnable
@ -354,18 +325,12 @@ public:
MediaPipelineTransmit(const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
bool is_video,
dom::MediaStreamTrack* domtrack,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
RefPtr<MediaSessionConduit> conduit);
// Initialize (stuff here may fail)
nsresult Init() override;
virtual void AttachToTrack(const std::string& track_id);
void Start() override;
void Stop() override;
// written and used from MainThread
bool IsVideo() const override;
@ -387,7 +352,7 @@ public:
// In non-compliance with the likely final spec, allow the new
// track to be part of a different stream (since we don't support
// multiple tracks of a type in a stream yet). bug 1056650
virtual nsresult ReplaceTrack(dom::MediaStreamTrack& domtrack);
virtual nsresult ReplaceTrack(RefPtr<dom::MediaStreamTrack>& domtrack);
// Separate classes to allow ref counting
class PipelineListener;
@ -396,40 +361,38 @@ public:
protected:
~MediaPipelineTransmit();
void SetDescription();
private:
RefPtr<PipelineListener> listener_;
RefPtr<AudioProxyThread> audio_processing_;
RefPtr<VideoFrameFeeder> feeder_;
RefPtr<VideoFrameConverter> converter_;
dom::MediaStreamTrack* domtrack_;
bool is_video_;
RefPtr<dom::MediaStreamTrack> domtrack_;
bool transmitting_;
};
// A specialization of pipeline for reading from the network and
// rendering video.
// rendering media.
class MediaPipelineReceive : public MediaPipeline {
public:
// Set rtcp_transport to nullptr to use rtcp-mux
MediaPipelineReceive(const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream *stream,
const std::string& track_id,
int level,
RefPtr<MediaSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
RefPtr<MediaSessionConduit> conduit);
int segments_added() const { return segments_added_; }
// Sets the PrincipalHandle we set on the media chunks produced by this
// pipeline. Must be called on the main thread.
virtual void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) = 0;
protected:
~MediaPipelineReceive();
RefPtr<SourceMediaStream> stream_;
int segments_added_;
private:
@ -443,27 +406,18 @@ class MediaPipelineReceiveAudio : public MediaPipelineReceive {
MediaPipelineReceiveAudio(const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream* stream,
// This comes from an msid attribute. Everywhere
// but MediaStreamGraph uses this.
const std::string& media_stream_track_id,
// This is an integer identifier that is only
// unique within a single DOMMediaStream, which is
// used by MediaStreamGraph
TrackID numeric_track_id,
int level,
RefPtr<AudioSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
SourceMediaStream* aStream);
void DetachMedia() override;
nsresult Init() override;
bool IsVideo() const override { return false; }
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override;
void Start() override;
void Stop() override;
private:
// Separate class to allow ref counting
class PipelineListener;
@ -479,28 +433,19 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
MediaPipelineReceiveVideo(const std::string& pc,
nsCOMPtr<nsIEventTarget> main_thread,
nsCOMPtr<nsIEventTarget> sts_thread,
SourceMediaStream *stream,
// This comes from an msid attribute. Everywhere
// but MediaStreamGraph uses this.
const std::string& media_stream_track_id,
// This is an integer identifier that is only
// unique within a single DOMMediaStream, which is
// used by MediaStreamGraph
TrackID numeric_track_id,
int level,
RefPtr<VideoSessionConduit> conduit,
RefPtr<TransportFlow> rtp_transport,
RefPtr<TransportFlow> rtcp_transport,
nsAutoPtr<MediaPipelineFilter> filter);
SourceMediaStream* aStream);
// Called on the main thread.
void DetachMedia() override;
nsresult Init() override;
bool IsVideo() const override { return true; }
void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override;
void Start() override;
void Stop() override;
private:
class PipelineRenderer;
friend class PipelineRenderer;

Просмотреть файл

@ -109,7 +109,8 @@ MediaPipelineFilter::FilterSenderReport(const unsigned char* data,
uint8_t payload_type = data[PT_OFFSET];
if (payload_type != SENDER_REPORT_T) {
return false;
// Not a sender report, let it through
return true;
}
uint32_t ssrc = 0;

Просмотреть файл

@ -6,7 +6,9 @@
include('/media/webrtc/webrtc.mozbuild')
LOCAL_INCLUDES += [
'!/ipc/ipdl/_ipdlheaders',
'/dom/media',
'/ipc/chromium/src',
'/media/libyuv/libyuv/include',
'/media/mtransport',
'/media/webrtc',

Просмотреть файл

@ -1,954 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "logging.h"
#include "nsIGfxInfo.h"
#include "nsServiceManagerUtils.h"
#include "PeerConnectionImpl.h"
#include "PeerConnectionMedia.h"
#include "MediaPipelineFactory.h"
#include "MediaPipelineFilter.h"
#include "transportflow.h"
#include "transportlayer.h"
#include "transportlayerdtls.h"
#include "transportlayerice.h"
#include "signaling/src/jsep/JsepTrack.h"
#include "signaling/src/jsep/JsepTransport.h"
#include "signaling/src/common/PtrVector.h"
#include "MediaStreamTrack.h"
#include "nsIPrincipal.h"
#include "nsIDocument.h"
#include "mozilla/Preferences.h"
#include "MediaEngine.h"
#include "mozilla/Preferences.h"
#include "WebrtcGmpVideoCodec.h"
#include <stdlib.h>
namespace mozilla {
MOZ_MTLOG_MODULE("MediaPipelineFactory")
// Converts a negotiated JSEP audio codec description into the
// AudioCodecConfig consumed by the conduit layer.
// On success, *aConfig is a new heap-allocated config owned by the caller.
// Returns NS_ERROR_INVALID_ARG for a non-audio codec or an unparsable
// payload type.
static nsresult
JsepCodecDescToCodecConfig(const JsepCodecDescription& aCodec,
                           AudioCodecConfig** aConfig)
{
  // Braced check with assert message, consistent with the video overload.
  MOZ_ASSERT(aCodec.mType == SdpMediaSection::kAudio,
             "JsepCodecDescription has wrong type");
  if (aCodec.mType != SdpMediaSection::kAudio) {
    return NS_ERROR_INVALID_ARG;
  }

  const JsepAudioCodecDescription& desc =
      static_cast<const JsepAudioCodecDescription&>(aCodec);

  uint16_t pt;
  if (!desc.GetPtAsInt(&pt)) {
    MOZ_MTLOG(ML_ERROR, "Invalid payload type: " << desc.mDefaultPt);
    return NS_ERROR_INVALID_ARG;
  }

  *aConfig = new AudioCodecConfig(pt,
                                  desc.mName,
                                  desc.mClock,
                                  desc.mPacketSize,
                                  desc.mForceMono ? 1 : desc.mChannels,
                                  desc.mBitrate,
                                  desc.mFECEnabled);
  (*aConfig)->mMaxPlaybackRate = desc.mMaxPlaybackRate;
  (*aConfig)->mDtmfEnabled = desc.mDtmfEnabled;

  return NS_OK;
}
// Returns the codecs negotiated on the primary (first) encoding, or an
// empty list when nothing was negotiated. Codecs that appear only on
// non-primary encodings are deliberately not handled.
static std::vector<JsepCodecDescription*>
GetCodecs(const JsepTrackNegotiatedDetails& aDetails)
{
  if (!aDetails.GetEncodingCount()) {
    return std::vector<JsepCodecDescription*>();
  }
  return aDetails.GetEncoding(0).GetCodecs();
}
// Builds an AudioCodecConfig for each codec negotiated on the primary
// encoding of |aDetails| and appends them to |aConfigs|, which owns the
// resulting pointers. Returns NS_ERROR_INVALID_ARG if any codec fails to
// convert; configs appended before the failure remain owned by |aConfigs|.
static nsresult
NegotiatedDetailsToAudioCodecConfigs(const JsepTrackNegotiatedDetails& aDetails,
PtrVector<AudioCodecConfig>* aConfigs)
{
std::vector<JsepCodecDescription*> codecs(GetCodecs(aDetails));
for (const JsepCodecDescription* codec : codecs) {
AudioCodecConfig* config;
if (NS_FAILED(JsepCodecDescToCodecConfig(*codec, &config))) {
return NS_ERROR_INVALID_ARG;
}
aConfigs->values.push_back(config);
}
return NS_OK;
}
// Converts a negotiated JSEP video codec description into the
// VideoCodecConfig consumed by the conduit layer. For H264 it also fills
// in a VideoCodecConfigH264 side-structure (sprop-parameter-sets,
// packetization mode, profile-level-id). On success, *aConfig is a new
// heap-allocated config owned by the caller.
static nsresult
JsepCodecDescToCodecConfig(const JsepCodecDescription& aCodec,
VideoCodecConfig** aConfig)
{
MOZ_ASSERT(aCodec.mType == SdpMediaSection::kVideo);
if (aCodec.mType != SdpMediaSection::kVideo) {
MOZ_ASSERT(false, "JsepCodecDescription has wrong type");
return NS_ERROR_INVALID_ARG;
}
const JsepVideoCodecDescription& desc =
static_cast<const JsepVideoCodecDescription&>(aCodec);
uint16_t pt;
if (!desc.GetPtAsInt(&pt)) {
MOZ_MTLOG(ML_ERROR, "Invalid payload type: " << desc.mDefaultPt);
return NS_ERROR_INVALID_ARG;
}
UniquePtr<VideoCodecConfigH264> h264Config;
if (desc.mName == "H264") {
h264Config = MakeUnique<VideoCodecConfigH264>();
size_t spropSize = sizeof(h264Config->sprop_parameter_sets);
strncpy(h264Config->sprop_parameter_sets,
desc.mSpropParameterSets.c_str(),
spropSize);
// strncpy does not null-terminate when the source fills the buffer,
// so terminate explicitly.
h264Config->sprop_parameter_sets[spropSize - 1] = '\0';
h264Config->packetization_mode = desc.mPacketizationMode;
h264Config->profile_level_id = desc.mProfileLevelId;
h264Config->tias_bw = 0; // TODO. Issue 165.
}
VideoCodecConfig* configRaw;
configRaw = new VideoCodecConfig(
pt, desc.mName, desc.mConstraints, h264Config.get());
// Propagate the negotiated RTCP feedback and FEC parameters.
configRaw->mAckFbTypes = desc.mAckFbTypes;
configRaw->mNackFbTypes = desc.mNackFbTypes;
configRaw->mCcmFbTypes = desc.mCcmFbTypes;
configRaw->mRembFbSet = desc.RtcpFbRembIsSet();
configRaw->mFECFbSet = desc.mFECEnabled;
if (desc.mFECEnabled) {
configRaw->mREDPayloadType = desc.mREDPayloadType;
configRaw->mULPFECPayloadType = desc.mULPFECPayloadType;
}
*aConfig = configRaw;
return NS_OK;
}
// Builds a VideoCodecConfig for each codec negotiated on the primary
// encoding, copies the negotiated TIAS bandwidth, and records one
// SimulcastEncoding entry (rid + constraints) for every encoding that
// includes the codec's payload type. |aConfigs| owns the results.
static nsresult
NegotiatedDetailsToVideoCodecConfigs(const JsepTrackNegotiatedDetails& aDetails,
PtrVector<VideoCodecConfig>* aConfigs)
{
std::vector<JsepCodecDescription*> codecs(GetCodecs(aDetails));
for (const JsepCodecDescription* codec : codecs) {
VideoCodecConfig* config;
if (NS_FAILED(JsepCodecDescToCodecConfig(*codec, &config))) {
return NS_ERROR_INVALID_ARG;
}
config->mTias = aDetails.GetTias();
// Collect the simulcast layers this codec participates in.
for (size_t i = 0; i < aDetails.GetEncodingCount(); ++i) {
const JsepTrackEncoding& jsepEncoding(aDetails.GetEncoding(i));
if (jsepEncoding.HasFormat(codec->mDefaultPt)) {
VideoCodecConfig::SimulcastEncoding encoding;
encoding.rid = jsepEncoding.mRid;
encoding.constraints = jsepEncoding.mConstraints;
config->mSimulcastEncodings.push_back(encoding);
}
}
aConfigs->values.push_back(config);
}
return NS_OK;
}
// Accessing the PCMedia should be safe here because we shouldn't
// have enqueued this function unless it was still active and
// the ICE data is destroyed on the STS.
// Runs on the STS thread. Points the bottom ICE layer at the ICE
// context/media stream for |aLevel| (ICE component 2 for RTCP, 1 for RTP)
// and pushes the prepared layer stack into |aFlow|.
static void
FinalizeTransportFlow_s(RefPtr<PeerConnectionMedia> aPCMedia,
RefPtr<TransportFlow> aFlow, size_t aLevel,
bool aIsRtcp,
nsAutoPtr<PtrVector<TransportLayer> > aLayerList)
{
// The caller placed the ICE layer first in the list.
TransportLayerIce* ice =
static_cast<TransportLayerIce*>(aLayerList->values.front());
ice->SetParameters(aPCMedia->ice_ctx(),
aPCMedia->ice_media_stream(aLevel),
aIsRtcp ? 2 : 1);
// Move the layers into the queue form PushLayers expects; the flow takes
// ownership, so clear the source vector to avoid a double free.
nsAutoPtr<std::queue<TransportLayer*> > layerQueue(
new std::queue<TransportLayer*>);
for (auto& value : aLayerList->values) {
layerQueue->push(value);
}
aLayerList->values.clear();
(void)aFlow->PushLayers(layerQueue); // TODO(bug 854518): Process errors.
}
// Runs on the STS thread. After an ICE restart, re-points an existing
// flow's ICE layer at the (new) ICE context and media stream for |aLevel|
// (component 2 for RTCP, 1 for RTP).
static void
AddNewIceStreamForRestart_s(RefPtr<PeerConnectionMedia> aPCMedia,
RefPtr<TransportFlow> aFlow,
size_t aLevel,
bool aIsRtcp)
{
TransportLayerIce* ice =
static_cast<TransportLayerIce*>(aFlow->GetLayer("ice"));
ice->SetParameters(aPCMedia->ice_ctx(),
aPCMedia->ice_media_stream(aLevel),
aIsRtcp ? 2 : 1);
}
// Returns (via |aFlowOutparam|) the TransportFlow for |aLevel|/|aIsRtcp|,
// creating and registering it if it does not exist yet. A new flow is
// assembled as an ICE + DTLS layer stack, finalized asynchronously on the
// STS thread. For an existing flow during ICE restart, the ICE layer is
// re-pointed at the new ICE stream instead.
nsresult
MediaPipelineFactory::CreateOrGetTransportFlow(
size_t aLevel,
bool aIsRtcp,
const JsepTransport& aTransport,
RefPtr<TransportFlow>* aFlowOutparam)
{
nsresult rv;
RefPtr<TransportFlow> flow;
flow = mPCMedia->GetTransportFlow(aLevel, aIsRtcp);
if (flow) {
if (mPCMedia->IsIceRestarting()) {
MOZ_MTLOG(ML_INFO, "Flow[" << flow->id() << "]: "
<< "detected ICE restart - level: "
<< aLevel << " rtcp: " << aIsRtcp);
rv = mPCMedia->GetSTSThread()->Dispatch(
WrapRunnableNM(AddNewIceStreamForRestart_s,
mPCMedia, flow, aLevel, aIsRtcp),
NS_DISPATCH_NORMAL);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Failed to dispatch AddNewIceStreamForRestart_s");
return rv;
}
}
*aFlowOutparam = flow;
return NS_OK;
}
// Flow ids take the form "<pc-handle>:<level>,rtp|rtcp".
std::ostringstream osId;
osId << mPC->GetHandle() << ":" << aLevel << ","
<< (aIsRtcp ? "rtcp" : "rtp");
flow = new TransportFlow(osId.str());
// The media streams are made on STS so we need to defer setup.
auto ice = MakeUnique<TransportLayerIce>(mPC->GetHandle());
auto dtls = MakeUnique<TransportLayerDtls>();
dtls->SetRole(aTransport.mDtls->GetRole() ==
JsepDtlsTransport::kJsepDtlsClient
? TransportLayerDtls::CLIENT
: TransportLayerDtls::SERVER);
RefPtr<DtlsIdentity> pcid = mPC->Identity();
if (!pcid) {
MOZ_MTLOG(ML_ERROR, "Failed to get DTLS identity.");
return NS_ERROR_FAILURE;
}
dtls->SetIdentity(pcid);
// Require the remote certificate to match the fingerprints from SDP.
const SdpFingerprintAttributeList& fingerprints =
aTransport.mDtls->GetFingerprints();
for (const auto& fingerprint : fingerprints.mFingerprints) {
std::ostringstream ss;
ss << fingerprint.hashFunc;
rv = dtls->SetVerificationDigest(ss.str(), &fingerprint.fingerprint[0],
fingerprint.fingerprint.size());
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Could not set fingerprint");
return rv;
}
}
std::vector<uint16_t> srtpCiphers;
srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_80);
srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_32);
rv = dtls->SetSrtpCiphers(srtpCiphers);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Couldn't set SRTP ciphers");
return rv;
}
// Always permits negotiation of the confidential mode.
// Only allow non-confidential (which is an allowed default),
// if we aren't confidential.
std::set<std::string> alpn;
std::string alpnDefault = "";
alpn.insert("c-webrtc");
if (!mPC->PrivacyRequested()) {
alpnDefault = "webrtc";
alpn.insert(alpnDefault);
}
rv = dtls->SetAlpn(alpn, alpnDefault);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Couldn't set ALPN");
return rv;
}
// Hand the (ICE, DTLS) stack to the STS thread for attachment;
// FinalizeTransportFlow_s takes ownership of the layer list.
nsAutoPtr<PtrVector<TransportLayer> > layers(new PtrVector<TransportLayer>);
layers->values.push_back(ice.release());
layers->values.push_back(dtls.release());
rv = mPCMedia->GetSTSThread()->Dispatch(
WrapRunnableNM(FinalizeTransportFlow_s, mPCMedia, flow, aLevel, aIsRtcp,
layers),
NS_DISPATCH_NORMAL);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Failed to dispatch FinalizeTransportFlow_s");
return rv;
}
mPCMedia->AddTransportFlow(aLevel, aIsRtcp, flow);
*aFlowOutparam = flow;
return NS_OK;
}
// Resolves the transport parameters for one negotiated track pair: the
// pipeline level, the RTP flow and (when not rtcp-muxed) the RTCP flow --
// using the BUNDLE level when bundled -- and, for bundled tracks, a
// MediaPipelineFilter that routes incoming packets by remote SSRC with
// unique payload types as a fallback.
nsresult
MediaPipelineFactory::GetTransportParameters(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
size_t* aLevelOut,
RefPtr<TransportFlow>* aRtpOut,
RefPtr<TransportFlow>* aRtcpOut,
nsAutoPtr<MediaPipelineFilter>* aFilterOut)
{
*aLevelOut = aTrackPair.mLevel;
// When bundled, all tracks share the transport at the bundle level.
size_t transportLevel = aTrackPair.HasBundleLevel() ?
aTrackPair.BundleLevel() :
aTrackPair.mLevel;
nsresult rv = CreateOrGetTransportFlow(
transportLevel, false, *aTrackPair.mRtpTransport, aRtpOut);
if (NS_FAILED(rv)) {
return rv;
}
MOZ_ASSERT(aRtpOut);
// A null mRtcpTransport means rtcp-mux; no separate RTCP flow is needed.
if (aTrackPair.mRtcpTransport) {
rv = CreateOrGetTransportFlow(
transportLevel, true, *aTrackPair.mRtcpTransport, aRtcpOut);
if (NS_FAILED(rv)) {
return rv;
}
MOZ_ASSERT(aRtcpOut);
}
if (aTrackPair.HasBundleLevel()) {
bool receiving = aTrack.GetDirection() == sdp::kRecv;
*aFilterOut = new MediaPipelineFilter;
if (receiving) {
// Add remote SSRCs so we can distinguish which RTP packets actually
// belong to this pipeline (also RTCP sender reports).
for (unsigned int ssrc : aTrack.GetSsrcs()) {
(*aFilterOut)->AddRemoteSSRC(ssrc);
}
// TODO(bug 1105005): Tell the filter about the mid for this track
// Add unique payload types as a last-ditch fallback
auto uniquePts = aTrack.GetNegotiatedDetails()->GetUniquePayloadTypes();
for (unsigned char& uniquePt : uniquePts) {
(*aFilterOut)->AddUniquePT(uniquePt);
}
}
}
return NS_OK;
}
// Entry point for (re)building the media plumbing for one negotiated
// track: resolves transports, locates the local/remote stream, gets or
// creates the conduit, starts/stops it to match the track's active state,
// and then either updates an existing pipeline's transport or creates a
// new pipeline.
nsresult
MediaPipelineFactory::CreateOrUpdateMediaPipeline(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack)
{
// The GMP code is all the way on the other side of webrtc.org, and it is not
// feasible to plumb this information all the way through. So, we set it (for
// the duration of this call) in a global variable. This allows the GMP code
// to report errors to the PC.
WebrtcGmpPCHandleSetter setter(mPC->GetHandle());
MOZ_ASSERT(aTrackPair.mRtpTransport);
bool receiving = aTrack.GetDirection() == sdp::kRecv;
size_t level;
RefPtr<TransportFlow> rtpFlow;
RefPtr<TransportFlow> rtcpFlow;
nsAutoPtr<MediaPipelineFilter> filter;
nsresult rv = GetTransportParameters(aTrackPair,
aTrack,
&level,
&rtpFlow,
&rtcpFlow,
&filter);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Failed to get transport parameters for pipeline, rv="
<< static_cast<unsigned>(rv));
return rv;
}
if (aTrack.GetMediaType() == SdpMediaSection::kApplication) {
// GetTransportParameters has already done everything we need for
// datachannel.
return NS_OK;
}
// Find the stream we need
SourceStreamInfo* stream;
if (receiving) {
stream = mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());
} else {
stream = mPCMedia->GetLocalStreamById(aTrack.GetStreamId());
}
if (!stream) {
MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
<< " stream id " << aTrack.GetStreamId() << " was never added");
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
if (!stream->HasTrack(aTrack.GetTrackId())) {
MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
<< " track id " << aTrack.GetTrackId() << " was never added");
MOZ_ASSERT(false);
return NS_ERROR_FAILURE;
}
RefPtr<MediaSessionConduit> conduit;
if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
rv = GetOrCreateAudioConduit(aTrackPair, aTrack, &conduit);
if (NS_FAILED(rv)) {
return rv;
}
} else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
rv = GetOrCreateVideoConduit(aTrackPair, aTrack, &conduit);
if (NS_FAILED(rv)) {
return rv;
}
// NOTE(review): only the video path sets the PC handle on the conduit;
// confirm the audio conduit genuinely does not need it.
conduit->SetPCHandle(mPC->GetHandle());
} else {
// We've created the TransportFlow, nothing else to do here.
return NS_OK;
}
// Start or stop the conduit to match the negotiated active state.
if (aTrack.GetActive()) {
if (receiving) {
auto error = conduit->StartReceiving();
if (error) {
MOZ_MTLOG(ML_ERROR, "StartReceiving failed: " << error);
return NS_ERROR_FAILURE;
}
} else {
auto error = conduit->StartTransmitting();
if (error) {
MOZ_MTLOG(ML_ERROR, "StartTransmitting failed: " << error);
return NS_ERROR_FAILURE;
}
}
} else {
if (receiving) {
auto error = conduit->StopReceiving();
if (error) {
MOZ_MTLOG(ML_ERROR, "StopReceiving failed: " << error);
return NS_ERROR_FAILURE;
}
} else {
auto error = conduit->StopTransmitting();
if (error) {
MOZ_MTLOG(ML_ERROR, "StopTransmitting failed: " << error);
return NS_ERROR_FAILURE;
}
}
}
// A pipeline at a different m-line level cannot be reused (the conduit is
// bound to its level), so drop it and re-add the track so a fresh
// pipeline is created below.
RefPtr<MediaPipeline> pipeline =
stream->GetPipelineByTrackId_m(aTrack.GetTrackId());
if (pipeline && pipeline->level() != static_cast<int>(level)) {
MOZ_MTLOG(ML_WARNING, "Track " << aTrack.GetTrackId() <<
" has moved from level " << pipeline->level() <<
" to level " << level <<
". This requires re-creating the MediaPipeline.");
RefPtr<dom::MediaStreamTrack> domTrack =
stream->GetTrackById(aTrack.GetTrackId());
MOZ_ASSERT(domTrack, "MediaPipeline existed for a track, but no MediaStreamTrack");
// Since we do not support changing the conduit on a pre-existing
// MediaPipeline
pipeline = nullptr;
stream->RemoveTrack(aTrack.GetTrackId());
stream->AddTrack(aTrack.GetTrackId(), domTrack);
}
if (pipeline) {
pipeline->UpdateTransport_m(level, rtpFlow, rtcpFlow, filter);
return NS_OK;
}
MOZ_MTLOG(ML_DEBUG,
"Creating media pipeline"
<< " m-line index=" << aTrackPair.mLevel
<< " type=" << aTrack.GetMediaType()
<< " direction=" << aTrack.GetDirection());
if (receiving) {
rv = CreateMediaPipelineReceiving(aTrackPair, aTrack,
level, rtpFlow, rtcpFlow, filter,
conduit);
if (NS_FAILED(rv))
return rv;
} else {
rv = CreateMediaPipelineSending(aTrackPair, aTrack,
level, rtpFlow, rtcpFlow, filter,
conduit);
if (NS_FAILED(rv))
return rv;
}
return NS_OK;
}
// Creates a MediaPipelineReceiveAudio/Video for a remote (receiving)
// track, initializes it, stores it on the remote stream info, and syncs
// the new pipeline with the stream.
nsresult
MediaPipelineFactory::CreateMediaPipelineReceiving(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
size_t aLevel,
RefPtr<TransportFlow> aRtpFlow,
RefPtr<TransportFlow> aRtcpFlow,
nsAutoPtr<MediaPipelineFilter> aFilter,
const RefPtr<MediaSessionConduit>& aConduit)
{
// We will error out earlier if this isn't here.
RefPtr<RemoteSourceStreamInfo> stream =
mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());
RefPtr<MediaPipelineReceive> pipeline;
// The MediaStreamGraph-internal integer id for this track.
TrackID numericTrackId = stream->GetNumericTrackId(aTrack.GetTrackId());
MOZ_ASSERT(IsTrackIDExplicit(numericTrackId));
MOZ_MTLOG(ML_DEBUG, __FUNCTION__ << ": Creating pipeline for "
<< numericTrackId << " -> " << aTrack.GetTrackId());
if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
pipeline = new MediaPipelineReceiveAudio(
mPC->GetHandle(),
mPC->GetMainThread().get(),
mPC->GetSTSThread(),
stream->GetMediaStream()->GetInputStream()->AsSourceStream(),
aTrack.GetTrackId(),
numericTrackId,
aLevel,
static_cast<AudioSessionConduit*>(aConduit.get()), // Ugly downcast.
aRtpFlow,
aRtcpFlow,
aFilter);
} else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
pipeline = new MediaPipelineReceiveVideo(
mPC->GetHandle(),
mPC->GetMainThread().get(),
mPC->GetSTSThread(),
stream->GetMediaStream()->GetInputStream()->AsSourceStream(),
aTrack.GetTrackId(),
numericTrackId,
aLevel,
static_cast<VideoSessionConduit*>(aConduit.get()), // Ugly downcast.
aRtpFlow,
aRtcpFlow,
aFilter);
} else {
MOZ_ASSERT(false);
MOZ_MTLOG(ML_ERROR, "Invalid media type in CreateMediaPipelineReceiving");
return NS_ERROR_FAILURE;
}
nsresult rv = pipeline->Init();
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Couldn't initialize receiving pipeline");
return rv;
}
rv = stream->StorePipeline(aTrack.GetTrackId(),
RefPtr<MediaPipeline>(pipeline));
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Couldn't store receiving pipeline " <<
static_cast<unsigned>(rv));
return rv;
}
stream->SyncPipeline(pipeline);
return NS_OK;
}
// Creates a MediaPipelineTransmit for a local (sending) track, applies any
// peerIdentity constraint from the document principal (failure means the
// sink produces black/silence), initializes the pipeline, and stores it on
// the local stream info.
nsresult
MediaPipelineFactory::CreateMediaPipelineSending(
    const JsepTrackPair& aTrackPair,
    const JsepTrack& aTrack,
    size_t aLevel,
    RefPtr<TransportFlow> aRtpFlow,
    RefPtr<TransportFlow> aRtcpFlow,
    nsAutoPtr<MediaPipelineFilter> aFilter,
    const RefPtr<MediaSessionConduit>& aConduit)
{
  nsresult rv;

  // This is checked earlier
  RefPtr<LocalSourceStreamInfo> stream =
      mPCMedia->GetLocalStreamById(aTrack.GetStreamId());

  dom::MediaStreamTrack* track =
      stream->GetTrackById(aTrack.GetTrackId());
  MOZ_ASSERT(track);

  // Now we have all the pieces, create the pipeline
  RefPtr<MediaPipelineTransmit> pipeline = new MediaPipelineTransmit(
      mPC->GetHandle(),
      mPC->GetMainThread().get(),
      mPC->GetSTSThread(),
      track,
      aTrack.GetTrackId(),
      aLevel,
      aConduit,
      aRtpFlow,
      aRtcpFlow,
      aFilter);

  // implement checking for peerIdentity (where failure == black/silence)
  nsIDocument* doc = mPC->GetWindow()->GetExtantDoc();
  if (doc) {
    pipeline->UpdateSinkIdentity_m(track,
                                   doc->NodePrincipal(),
                                   mPC->GetPeerIdentity());
  } else {
    MOZ_MTLOG(ML_ERROR, "Cannot initialize pipeline without attached doc");
    return NS_ERROR_FAILURE; // Don't remove this till we know it's safe.
  }

  rv = pipeline->Init();
  if (NS_FAILED(rv)) {
    MOZ_MTLOG(ML_ERROR, "Couldn't initialize sending pipeline");
    return rv;
  }

  rv = stream->StorePipeline(aTrack.GetTrackId(),
                             RefPtr<MediaPipeline>(pipeline));
  if (NS_FAILED(rv)) {
    // Fixed copy-paste error: this is the sending path, so log "sending"
    // (the old message said "receiving").
    MOZ_MTLOG(ML_ERROR, "Couldn't store sending pipeline " <<
                        static_cast<unsigned>(rv));
    return rv;
  }

  return NS_OK;
}
// Gets (or creates and registers) the audio conduit for this m-line and
// configures it from the track's negotiated details. Receive side: recv
// codecs, plus a local SSRC for receiver reports when there is no send
// track. Send side: local SSRCs, CNAME, MID, DTMF payload type, the send
// codec, and the audio-level / MID RTP header extensions.
nsresult
MediaPipelineFactory::GetOrCreateAudioConduit(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp)
{
if (!aTrack.GetNegotiatedDetails()) {
MOZ_ASSERT(false, "Track is missing negotiated details");
return NS_ERROR_INVALID_ARG;
}
bool receiving = aTrack.GetDirection() == sdp::kRecv;
RefPtr<AudioSessionConduit> conduit =
mPCMedia->GetAudioConduit(aTrackPair.mLevel);
if (!conduit) {
conduit = AudioSessionConduit::Create();
if (!conduit) {
MOZ_MTLOG(ML_ERROR, "Could not create audio conduit");
return NS_ERROR_FAILURE;
}
mPCMedia->AddAudioConduit(aTrackPair.mLevel, conduit);
}
PtrVector<AudioCodecConfig> configs;
nsresult rv = NegotiatedDetailsToAudioCodecConfigs(
*aTrack.GetNegotiatedDetails(), &configs);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Failed to convert JsepCodecDescriptions to "
"AudioCodecConfigs.");
return rv;
}
if (configs.values.empty()) {
MOZ_MTLOG(ML_ERROR, "Can't set up a conduit with 0 codecs");
return NS_ERROR_FAILURE;
}
if (receiving) {
auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
if (error) {
MOZ_MTLOG(ML_ERROR, "ConfigureRecvMediaCodecs failed: " << error);
return NS_ERROR_FAILURE;
}
if (!aTrackPair.mSending) {
// No send track, but we still need to configure an SSRC for receiver
// reports.
if (!conduit->SetLocalSSRCs(std::vector<unsigned int>(1,aTrackPair.mRecvonlySsrc))) {
MOZ_MTLOG(ML_ERROR, "SetLocalSSRC failed");
return NS_ERROR_FAILURE;
}
}
} else {
auto ssrcs = aTrack.GetSsrcs();
if (!ssrcs.empty()) {
if (!conduit->SetLocalSSRCs(ssrcs)) {
MOZ_MTLOG(ML_ERROR, "SetLocalSSRCs failed");
return NS_ERROR_FAILURE;
}
}
conduit->SetLocalCNAME(aTrack.GetCNAME().c_str());
conduit->SetLocalMID(aTrackPair.mRtpTransport->mTransportId);
for (auto value: configs.values) {
if (value->mName == "telephone-event") {
// we have a telephone event codec, so we need to make sure
// the dynamic pt is set properly
conduit->SetDtmfPayloadType(value->mType, value->mFreq);
break;
}
}
// Only the first (most-preferred) codec is used for sending.
auto error = conduit->ConfigureSendMediaCodec(configs.values[0]);
if (error) {
MOZ_MTLOG(ML_ERROR, "ConfigureSendMediaCodec failed: " << error);
return NS_ERROR_FAILURE;
}
// Should these be genericized like they are in the video conduit case?
const SdpExtmapAttributeList::Extmap* audioLevelExt =
aTrack.GetNegotiatedDetails()->GetExt(
webrtc::RtpExtension::kAudioLevelUri);
if (audioLevelExt) {
MOZ_MTLOG(ML_DEBUG, "Calling EnableAudioLevelExtension");
error = conduit->EnableAudioLevelExtension(true, audioLevelExt->entry);
if (error) {
MOZ_MTLOG(ML_ERROR, "EnableAudioLevelExtension failed: " << error);
return NS_ERROR_FAILURE;
}
}
const SdpExtmapAttributeList::Extmap* midExt =
aTrack.GetNegotiatedDetails()->GetExt(webrtc::RtpExtension::kMIdUri);
if (midExt) {
MOZ_MTLOG(ML_DEBUG, "Calling EnableMIDExtension");
error = conduit->EnableMIDExtension(true, midExt->entry);
if (error) {
MOZ_MTLOG(ML_ERROR, "EnableMIDExtension failed: " << error);
return NS_ERROR_FAILURE;
}
}
}
*aConduitp = conduit;
return NS_OK;
}
// Gets (or creates and registers) the video conduit for this m-line and
// configures it from the track's negotiated details. The webrtc.org Call
// API requires non-zero local and remote SSRCs before the receive stream
// is created, so SSRCs are configured before the codecs.
nsresult
MediaPipelineFactory::GetOrCreateVideoConduit(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp)
{
if (!aTrack.GetNegotiatedDetails()) {
MOZ_ASSERT(false, "Track is missing negotiated details");
return NS_ERROR_INVALID_ARG;
}
bool receiving = aTrack.GetDirection() == sdp::kRecv;
RefPtr<VideoSessionConduit> conduit =
mPCMedia->GetVideoConduit(aTrackPair.mLevel);
if (!conduit) {
conduit = VideoSessionConduit::Create(mPCMedia->mCall);
if (!conduit) {
MOZ_MTLOG(ML_ERROR, "Could not create video conduit");
return NS_ERROR_FAILURE;
}
mPCMedia->AddVideoConduit(aTrackPair.mLevel, conduit);
}
PtrVector<VideoCodecConfig> configs;
nsresult rv = NegotiatedDetailsToVideoCodecConfigs(
*aTrack.GetNegotiatedDetails(), &configs);
if (NS_FAILED(rv)) {
MOZ_MTLOG(ML_ERROR, "Failed to convert JsepCodecDescriptions to "
"VideoCodecConfigs.");
return rv;
}
if (configs.values.empty()) {
MOZ_MTLOG(ML_ERROR, "Can't set up a conduit with 0 codecs");
return NS_ERROR_FAILURE;
}
const std::vector<uint32_t>* ssrcs;
const JsepTrackNegotiatedDetails* details = aTrack.GetNegotiatedDetails();
// Collect the negotiated RTP header extensions for the conduit.
std::vector<webrtc::RtpExtension> extmaps;
if (details) {
// @@NG read extmap from track
details->ForEachRTPHeaderExtension(
[&extmaps](const SdpExtmapAttributeList::Extmap& extmap)
{
extmaps.emplace_back(extmap.extensionname,extmap.entry);
});
}
if (receiving) {
// NOTE(pkerr) - the Call API requires the both local_ssrc and remote_ssrc be
// set to a non-zero value or the CreateVideo...Stream call will fail.
if (aTrackPair.mSending) {
ssrcs = &aTrackPair.mSending->GetSsrcs();
if (!ssrcs->empty()) {
conduit->SetLocalSSRCs(*ssrcs);
}
} else {
// No send track, but we still need to configure an SSRC for receiver
// reports.
if (!conduit->SetLocalSSRCs(std::vector<unsigned int>(1,aTrackPair.mRecvonlySsrc))) {
MOZ_MTLOG(ML_ERROR, "SetLocalSSRCs failed");
return NS_ERROR_FAILURE;
}
}
ssrcs = &aTrack.GetSsrcs();
// NOTE(pkerr) - this is new behavior. Needed because the CreateVideoReceiveStream
// method of the Call API will assert (in debug) and fail if a value is not provided
// for the remote_ssrc that will be used by the far-end sender.
if (!ssrcs->empty()) {
conduit->SetRemoteSSRC(ssrcs->front());
}
if (!extmaps.empty()) {
conduit->SetLocalRTPExtensions(false, extmaps);
}
auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
if (error) {
MOZ_MTLOG(ML_ERROR, "ConfigureRecvMediaCodecs failed: " << error);
return NS_ERROR_FAILURE;
}
} else { //Create a send side
// For now we only expect to have one ssrc per local track.
ssrcs = &aTrack.GetSsrcs();
if (ssrcs->empty()) {
MOZ_MTLOG(ML_ERROR, "No SSRC set for send track");
return NS_ERROR_FAILURE;
}
if (!conduit->SetLocalSSRCs(*ssrcs)) {
MOZ_MTLOG(ML_ERROR, "SetLocalSSRC failed");
return NS_ERROR_FAILURE;
}
conduit->SetLocalCNAME(aTrack.GetCNAME().c_str());
conduit->SetLocalMID(aTrackPair.mRtpTransport->mTransportId);
rv = ConfigureVideoCodecMode(aTrack, *conduit);
if (NS_FAILED(rv)) {
return rv;
}
if (!extmaps.empty()) {
conduit->SetLocalRTPExtensions(true, extmaps);
}
// Only the first (most-preferred) codec is used for sending.
auto error = conduit->ConfigureSendMediaCodec(configs.values[0]);
if (error) {
MOZ_MTLOG(ML_ERROR, "ConfigureSendMediaCodec failed: " << error);
return NS_ERROR_FAILURE;
}
}
*aConduitp = conduit;
return NS_OK;
}
// Chooses the webrtc encoder mode (screensharing vs. realtime video) for a
// send track based on the capture source of its underlying video
// MediaStreamTrack.
nsresult
MediaPipelineFactory::ConfigureVideoCodecMode(const JsepTrack& aTrack,
VideoSessionConduit& aConduit)
{
// NOTE(review): |stream| and |track| are not null-checked before use;
// callers appear to validate them earlier -- confirm.
RefPtr<LocalSourceStreamInfo> stream =
mPCMedia->GetLocalStreamByTrackId(aTrack.GetTrackId());
//get video track
RefPtr<mozilla::dom::MediaStreamTrack> track =
stream->GetTrackById(aTrack.GetTrackId());
RefPtr<mozilla::dom::VideoStreamTrack> videotrack =
track->AsVideoStreamTrack();
if (!videotrack) {
MOZ_MTLOG(ML_ERROR, "video track not available");
return NS_ERROR_FAILURE;
}
// Screen/window/application/browser capture gets the screensharing
// encoder tuning; cameras (and anything else) use realtime video.
dom::MediaSourceEnum source = videotrack->GetSource().GetMediaSource();
webrtc::VideoCodecMode mode = webrtc::kRealtimeVideo;
switch (source) {
case dom::MediaSourceEnum::Browser:
case dom::MediaSourceEnum::Screen:
case dom::MediaSourceEnum::Application:
case dom::MediaSourceEnum::Window:
mode = webrtc::kScreensharing;
break;
case dom::MediaSourceEnum::Camera:
default:
mode = webrtc::kRealtimeVideo;
break;
}
auto error = aConduit.ConfigureCodecMode(mode);
if (error) {
MOZ_MTLOG(ML_ERROR, "ConfigureCodecMode failed: " << error);
return NS_ERROR_FAILURE;
}
return NS_OK;
}
} // namespace mozilla

Просмотреть файл

@ -1,78 +0,0 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef _MEDIAPIPELINEFACTORY_H_
#define _MEDIAPIPELINEFACTORY_H_
#include "MediaConduitInterface.h"
#include "PeerConnectionMedia.h"
#include "transportflow.h"
#include "signaling/src/jsep/JsepTrack.h"
#include "mozilla/RefPtr.h"
#include "mozilla/UniquePtr.h"
namespace mozilla {
// Transient helper that builds the media plumbing -- transport flows,
// conduits, and MediaPipelines -- for negotiated JSEP track pairs.
// Holds non-owning pointers to PeerConnectionMedia/PeerConnectionImpl,
// which must outlive the factory.
class MediaPipelineFactory
{
public:
explicit MediaPipelineFactory(PeerConnectionMedia* aPCMedia)
: mPCMedia(aPCMedia), mPC(aPCMedia->GetPC())
{
}
// Main entry point: creates or updates the pipeline for one track.
nsresult CreateOrUpdateMediaPipeline(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack);
private:
// Builds a receive pipeline for a remote track.
nsresult CreateMediaPipelineReceiving(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
size_t level,
RefPtr<TransportFlow> aRtpFlow,
RefPtr<TransportFlow> aRtcpFlow,
nsAutoPtr<MediaPipelineFilter> filter,
const RefPtr<MediaSessionConduit>& aConduit);
// Builds a transmit pipeline for a local track.
nsresult CreateMediaPipelineSending(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
size_t level,
RefPtr<TransportFlow> aRtpFlow,
RefPtr<TransportFlow> aRtcpFlow,
nsAutoPtr<MediaPipelineFilter> filter,
const RefPtr<MediaSessionConduit>& aConduit);
// Conduit lookup/creation + configuration from negotiated details.
nsresult GetOrCreateAudioConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp);
nsresult GetOrCreateVideoConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp);
// Transport flow lookup/creation (ICE + DTLS stack).
nsresult CreateOrGetTransportFlow(size_t aLevel, bool aIsRtcp,
const JsepTransport& transport,
RefPtr<TransportFlow>* out);
// Resolves level, flows, and bundle filter for a track pair.
nsresult GetTransportParameters(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
size_t* aLevelOut,
RefPtr<TransportFlow>* aRtpOut,
RefPtr<TransportFlow>* aRtcpOut,
nsAutoPtr<MediaPipelineFilter>* aFilterOut);
// Picks screensharing vs. realtime encoder mode for a send track.
nsresult ConfigureVideoCodecMode(const JsepTrack& aTrack,
VideoSessionConduit& aConduit);
private:
// Not owned, and assumed to exist as long as the factory.
// The factory is a transient object, so this is fairly easy.
PeerConnectionMedia* mPCMedia;
PeerConnectionImpl* mPC;
};
} // namespace mozilla
#endif

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -38,6 +38,7 @@
#include "mozilla/ErrorResult.h"
#include "mozilla/dom/PeerConnectionImplEnumsBinding.h"
#include "mozilla/dom/RTCPeerConnectionBinding.h" // mozPacketDumpType, maybe move?
#include "mozilla/dom/RTCRtpTransceiverBinding.h"
#include "PrincipalChangeObserver.h"
#include "StreamTracks.h"
@ -65,6 +66,7 @@ class NrIceMediaStream;
class NrIceStunServer;
class NrIceTurnServer;
class MediaPipeline;
class TransceiverImpl;
class DOMMediaStream;
@ -255,7 +257,7 @@ public:
static already_AddRefed<PeerConnectionImpl>
Constructor(const mozilla::dom::GlobalObject& aGlobal, ErrorResult& rv);
static PeerConnectionImpl* CreatePeerConnection();
already_AddRefed<DOMMediaStream> MakeMediaStream();
OwningNonNull<DOMMediaStream> MakeMediaStream();
nsresult CreateRemoteSourceStreamInfo(RefPtr<RemoteSourceStreamInfo>* aInfo,
const std::string& aId);
@ -362,9 +364,7 @@ public:
rv = SetLocalDescription(aAction, NS_ConvertUTF16toUTF8(aSDP).get());
}
nsresult CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco);
void RemoveOldRemoteTracks(RefPtr<PeerConnectionObserver>& aPco);
void FireOnTrackEvents(RefPtr<PeerConnectionObserver>& aPco);
NS_IMETHODIMP SetRemoteDescription (int32_t aAction, const char* aSDP);
@ -398,27 +398,27 @@ public:
rv = CloseStreams();
}
NS_IMETHODIMP_TO_ERRORRESULT(AddTrack, ErrorResult &rv,
mozilla::dom::MediaStreamTrack& aTrack,
const mozilla::dom::Sequence<mozilla::OwningNonNull<DOMMediaStream>>& aStreams)
{
rv = AddTrack(aTrack, aStreams);
}
NS_IMETHODIMP_TO_ERRORRESULT(RemoveTrack, ErrorResult &rv,
mozilla::dom::MediaStreamTrack& aTrack)
{
rv = RemoveTrack(aTrack);
}
nsresult
AddTrack(mozilla::dom::MediaStreamTrack& aTrack, DOMMediaStream& aStream);
already_AddRefed<TransceiverImpl> CreateTransceiverImpl(
const nsAString& aKind,
dom::MediaStreamTrack* aSendTrack,
ErrorResult& rv);
OwningNonNull<DOMMediaStream> CreateReceiveStreamWithTrack(
SdpMediaSection::MediaType type);
bool CheckNegotiationNeeded(ErrorResult &rv);
NS_IMETHODIMP_TO_ERRORRESULT(InsertDTMF, ErrorResult &rv,
dom::RTCRtpSender& sender,
TransceiverImpl& transceiver,
const nsAString& tones,
uint32_t duration, uint32_t interToneGap) {
rv = InsertDTMF(sender, tones, duration, interToneGap);
rv = InsertDTMF(transceiver, tones, duration, interToneGap);
}
NS_IMETHODIMP_TO_ERRORRESULT(GetDTMFToneBuffer, ErrorResult &rv,
@ -427,11 +427,11 @@ public:
rv = GetDTMFToneBuffer(sender, outToneBuffer);
}
NS_IMETHODIMP_TO_ERRORRESULT(ReplaceTrack, ErrorResult &rv,
mozilla::dom::MediaStreamTrack& aThisTrack,
mozilla::dom::MediaStreamTrack& aWithTrack)
NS_IMETHODIMP_TO_ERRORRESULT(ReplaceTrackNoRenegotiation, ErrorResult &rv,
TransceiverImpl& aTransceiver,
mozilla::dom::MediaStreamTrack* aWithTrack)
{
rv = ReplaceTrack(aThisTrack, aWithTrack);
rv = ReplaceTrackNoRenegotiation(aTransceiver, aWithTrack);
}
NS_IMETHODIMP_TO_ERRORRESULT(SetParameters, ErrorResult &rv,
@ -595,18 +595,6 @@ public:
bool aExternalNegotiated,
uint16_t aStream);
NS_IMETHODIMP_TO_ERRORRESULT(GetLocalStreams, ErrorResult &rv,
nsTArray<RefPtr<DOMMediaStream > >& result)
{
rv = GetLocalStreams(result);
}
NS_IMETHODIMP_TO_ERRORRESULT(GetRemoteStreams, ErrorResult &rv,
nsTArray<RefPtr<DOMMediaStream > >& result)
{
rv = GetRemoteStreams(result);
}
// Called whenever something is unrecognized by the parser
// May be called more than once and does not necessarily mean
// that parsing was stopped, only that something was unrecognized.
@ -645,9 +633,6 @@ public:
// PeerConnectionMedia can't do it because it doesn't know about principals
virtual void PrincipalChanged(dom::MediaStreamTrack* aTrack) override;
static std::string GetStreamId(const DOMMediaStream& aStream);
static std::string GetTrackId(const dom::MediaStreamTrack& track);
void OnMediaError(const std::string& aError);
bool ShouldDumpPacket(size_t level, dom::mozPacketDumpType type,
@ -702,14 +687,11 @@ private:
bool* mmsset,
uint16_t* level) const;
static void DeferredAddTrackToJsepSession(const std::string& pcHandle,
SdpMediaSection::MediaType type,
const std::string& streamId,
const std::string& trackId);
nsresult AddTrackToJsepSession(SdpMediaSection::MediaType type,
const std::string& streamId,
const std::string& trackId);
nsresult AddRtpTransceiverToJsepSession(RefPtr<JsepTransceiver>& transceiver);
already_AddRefed<TransceiverImpl> CreateTransceiverImpl(
JsepTransceiver* aJsepTransceiver,
dom::MediaStreamTrack* aSendTrack,
ErrorResult& aRv);
nsresult SetupIceRestart();
nsresult RollbackIceRestart();
@ -732,10 +714,6 @@ private:
// or other things.
void RecordLongtermICEStatistics();
void OnNegotiationNeeded();
static void MaybeFireNegotiationNeeded_static(const std::string& pcHandle);
void MaybeFireNegotiationNeeded();
// Timecard used to measure processing time. This should be the first class
// attribute so that we accurately measure the time required to instantiate
// any other attributes of this class.
@ -817,8 +795,6 @@ private:
bool mTrickle;
bool mNegotiationNeeded;
bool mPrivateWindow;
// Whether this PeerConnection is being counted as active by mWindow
@ -830,11 +806,12 @@ private:
// DTMF
struct DTMFState {
PeerConnectionImpl* mPeerConnectionImpl;
DTMFState();
~DTMFState();
nsWeakPtr mPCObserver;
RefPtr<TransceiverImpl> mTransceiver;
nsCOMPtr<nsITimer> mSendTimer;
nsString mTrackId;
nsString mTones;
size_t mLevel;
uint32_t mDuration;
uint32_t mInterToneGap;
};
@ -842,6 +819,7 @@ private:
static void
DTMFSendTimerCallback_m(nsITimer* timer, void*);
// TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
nsTArray<DTMFState> mDTMFStates;
std::vector<unsigned> mSendPacketDumpFlags;

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -9,229 +9,43 @@
#include <vector>
#include <map>
#include "nspr.h"
#include "prlock.h"
#include "mozilla/RefPtr.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/net/StunAddrsRequestChild.h"
#include "nsComponentManagerUtils.h"
#include "nsIProtocolProxyCallback.h"
#include "signaling/src/jsep/JsepSession.h"
#include "AudioSegment.h"
#include "Layers.h"
#include "VideoUtils.h"
#include "ImageLayers.h"
#include "VideoSegment.h"
#include "MediaStreamTrack.h"
#include "TransceiverImpl.h"
class nsIPrincipal;
namespace mozilla {
class DataChannel;
class PeerIdentity;
class MediaPipelineFactory;
namespace dom {
struct RTCInboundRTPStreamStats;
struct RTCOutboundRTPStreamStats;
class MediaStreamTrack;
}
}
#include "nricectxhandler.h"
#include "nriceresolver.h"
#include "nricemediastream.h"
#include "MediaPipeline.h"
namespace mozilla {
class PeerConnectionImpl;
class PeerConnectionMedia;
class PCUuidGenerator;
class MediaPipeline;
class MediaPipelineFilter;
class JsepSession;
// Book-keeping for one DOMMediaStream (local or remote) inside a peer
// connection: maps string track ids to their MediaStreamTracks and to the
// MediaPipelines that carry them. Base class shared by LocalSourceStreamInfo
// and RemoteSourceStreamInfo.
class SourceStreamInfo {
public:
  SourceStreamInfo(DOMMediaStream* aMediaStream,
                   PeerConnectionMedia *aParent,
                   const std::string& aId)
      : mMediaStream(aMediaStream),
        mParent(aParent),
        mId(aId) {
    MOZ_ASSERT(mMediaStream);
  }

  SourceStreamInfo(already_AddRefed<DOMMediaStream>& aMediaStream,
                   PeerConnectionMedia *aParent,
                   const std::string& aId)
      : mMediaStream(aMediaStream),
        mParent(aParent),
        mId(aId) {
    MOZ_ASSERT(mMediaStream);
  }

  virtual ~SourceStreamInfo() {}

  DOMMediaStream* GetMediaStream() const {
    return mMediaStream;
  }

  // Record |aPipeline| as the pipeline carrying the track named |trackId|.
  nsresult StorePipeline(const std::string& trackId,
                         const RefPtr<MediaPipeline>& aPipeline);

  // Register a track under |trackId|; subclasses may extend this.
  virtual void AddTrack(const std::string& trackId,
                        const RefPtr<dom::MediaStreamTrack>& aTrack)
  {
    mTracks.insert(std::make_pair(trackId, aTrack));
  }
  virtual void RemoveTrack(const std::string& trackId);
  bool HasTrack(const std::string& trackId) const
  {
    return !!mTracks.count(trackId);
  }
  size_t GetTrackCount() const { return mTracks.size(); }

  // This method exists for stats and the unittests.
  // It allows visibility into the pipelines and flows.
  const std::map<std::string, RefPtr<MediaPipeline>>&
  GetPipelines() const { return mPipelines; }
  // Main-thread lookup of the pipeline for |trackId| (the _m suffix follows
  // the file's thread-naming convention).
  RefPtr<MediaPipeline> GetPipelineByTrackId_m(const std::string& trackId);
  // This is needed so PeerConnectionImpl can unregister itself as
  // PrincipalChangeObserver from each track.
  const std::map<std::string, RefPtr<dom::MediaStreamTrack>>&
  GetMediaStreamTracks() const { return mTracks; }
  // Returns the track registered under |trackId|, or nullptr if unknown.
  dom::MediaStreamTrack* GetTrackById(const std::string& trackId) const
  {
    auto it = mTracks.find(trackId);
    if (it == mTracks.end()) {
      return nullptr;
    }
    return it->second;
  }
  const std::string& GetId() const { return mId; }

  // Transport teardown on the STS thread (_s suffix).
  void DetachTransport_s();
  // Media teardown on the main thread (_m suffix).
  virtual void DetachMedia_m();
  // True if any codec used by our pipelines matches |aPluginID| (used for
  // GMP/plugin crash handling elsewhere in the file — confirm at caller).
  bool AnyCodecHasPluginID(uint64_t aPluginID);

protected:
  void EndTrack(MediaStream* stream, dom::MediaStreamTrack* track);

  RefPtr<DOMMediaStream> mMediaStream;
  PeerConnectionMedia *mParent;   // non-owning back-pointer
  const std::string mId;
  // These get set up before we generate our local description, the pipelines
  // and conduits are set up once offer/answer completes.
  std::map<std::string, RefPtr<dom::MediaStreamTrack>> mTracks;
  std::map<std::string, RefPtr<MediaPipeline>> mPipelines;
};
// TODO(ekr@rtfm.com): Refactor {Local,Remote}SourceStreamInfo
// bug 837539.
// Book-keeping for a locally-sourced (outbound) stream; adds pipeline
// hand-off for track replacement and sink-identity updates.
class LocalSourceStreamInfo : public SourceStreamInfo {
  // Private dtor: instances are destroyed via Release() from the
  // NS_INLINE_DECL_THREADSAFE_REFCOUNTING below.
  ~LocalSourceStreamInfo() {
    mMediaStream = nullptr;
  }
public:
  LocalSourceStreamInfo(DOMMediaStream *aMediaStream,
                        PeerConnectionMedia *aParent,
                        const std::string& aId)
      : SourceStreamInfo(aMediaStream, aParent, aId) {}

  // Take over the pipeline that |info| holds for |oldTrackId|, re-keying it
  // to |newTrackId| and attaching |aNewTrack| (used for replaceTrack —
  // confirm at caller).
  nsresult TakePipelineFrom(RefPtr<LocalSourceStreamInfo>& info,
                            const std::string& oldTrackId,
                            dom::MediaStreamTrack& aNewTrack,
                            const std::string& newTrackId);

  // Main-thread update of principal/peer-identity constraints on the sink.
  void UpdateSinkIdentity_m(dom::MediaStreamTrack* aTrack,
                            nsIPrincipal* aPrincipal,
                            const PeerIdentity* aSinkIdentity);

  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(LocalSourceStreamInfo)

private:
  // Removes and returns our pipeline for |trackId| (ownership transfers to
  // the caller).
  already_AddRefed<MediaPipeline> ForgetPipelineByTrackId_m(
      const std::string& trackId);
};
// MediaStreamTrackSource backing a track received from the remote peer.
// The principal can be swapped later via SetPrincipal, which notifies
// PrincipalChanged observers.
class RemoteTrackSource : public dom::MediaStreamTrackSource
{
public:
  explicit RemoteTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
    : dom::MediaStreamTrackSource(aPrincipal, aLabel) {}

  dom::MediaSourceEnum GetMediaSource() const override
  {
    // Remote tracks don't map to a capture device; report "Other".
    return dom::MediaSourceEnum::Other;
  }

  // Declared here; defined out of line (not visible in this header).
  already_AddRefed<PledgeVoid>
  ApplyConstraints(nsPIDOMWindowInner* aWindow,
                   const dom::MediaTrackConstraints& aConstraints,
                   dom::CallerType aCallerType) override;

  void Stop() override
  {
    // XXX (Bug 1314270): Implement rejection logic if necessary when we have
    // clarity in the spec.
  }

  // Replace the source's principal and broadcast the change to tracks.
  void SetPrincipal(nsIPrincipal* aPrincipal)
  {
    mPrincipal = aPrincipal;
    PrincipalChanged();
  }

protected:
  virtual ~RemoteTrackSource() {}
};
// Book-keeping for a remotely-sourced (inbound) stream; adds receive-side
// pipeline syncing, principal updates, and numeric track-id lookup.
class RemoteSourceStreamInfo : public SourceStreamInfo {
  // Private dtor: lifetime managed by the refcounting macro below.
  ~RemoteSourceStreamInfo() {}
public:
  RemoteSourceStreamInfo(already_AddRefed<DOMMediaStream> aMediaStream,
                         PeerConnectionMedia *aParent,
                         const std::string& aId)
    : SourceStreamInfo(aMediaStream, aParent, aId),
      mReceiving(false)
  {
  }

  void DetachMedia_m() override;
  void RemoveTrack(const std::string& trackId) override;
  // Bring |aPipeline| in line with current negotiated state — details are in
  // the .cpp, not visible here.
  void SyncPipeline(RefPtr<MediaPipelineReceive> aPipeline);

  // Main-thread principal update for all tracks of this stream.
  void UpdatePrincipal_m(nsIPrincipal* aPrincipal);

  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(RemoteSourceStreamInfo)

  void AddTrack(const std::string& trackId,
                const RefPtr<dom::MediaStreamTrack>& aTrack) override
  {
    SourceStreamInfo::AddTrack(trackId, aTrack);
  }

  // Map the string track id to the MediaStreamGraph TrackID, or
  // TRACK_INVALID if we don't know the track.
  TrackID GetNumericTrackId(const std::string& trackId) const
  {
    dom::MediaStreamTrack* track = GetTrackById(trackId);
    if (!track) {
      return TRACK_INVALID;
    }
    return track->mTrackID;
  }

  void StartReceiving();

private:
  // True iff SetPullEnabled(true) has been called on the DOMMediaStream. This
  // happens when offer/answer concludes.
  bool mReceiving;
};
// TODO(bug 1402997): If we move the TransceiverImpl stuff out of here, this
// will be a class that handles just the transport stuff, and we can rename it
// to something more explanatory (say, PeerConnectionTransportManager).
class PeerConnectionMedia : public sigslot::has_slots<> {
~PeerConnectionMedia()
{
MOZ_RELEASE_ASSERT(!mMainThread);
}
~PeerConnectionMedia();
public:
explicit PeerConnectionMedia(PeerConnectionImpl *parent);
@ -264,8 +78,11 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
// Activate or remove ICE transports at the conclusion of offer/answer,
// or when rollback occurs.
void ActivateOrRemoveTransports(const JsepSession& aSession,
const bool forceIceTcp);
nsresult ActivateOrRemoveTransports(const JsepSession& aSession,
const bool forceIceTcp);
// Update the transports on the TransceiverImpls
nsresult UpdateTransceiverTransports(const JsepSession& aSession);
// Start ICE checks.
void StartIceChecks(const JsepSession& session);
@ -291,51 +108,38 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
void UpdateNetworkState(bool online);
// Handle complete media pipelines.
nsresult UpdateMediaPipelines(const JsepSession& session);
// This updates codec parameters, starts/stops send/receive, and other
// stuff that doesn't necessarily require negotiation. This can be called at
// any time, not just when an offer/answer exchange completes.
// TODO: Let's move this to PeerConnectionImpl
nsresult UpdateMediaPipelines();
// Add a track (main thread only)
nsresult AddTrack(DOMMediaStream& aMediaStream,
const std::string& streamId,
dom::MediaStreamTrack& aTrack,
const std::string& trackId);
// TODO: Let's move the TransceiverImpl stuff to PeerConnectionImpl.
nsresult AddTransceiver(
JsepTransceiver* aJsepTransceiver,
DOMMediaStream& aReceiveStream,
dom::MediaStreamTrack* aSendTrack,
RefPtr<TransceiverImpl>* aTransceiverImpl);
nsresult RemoveLocalTrack(const std::string& streamId,
const std::string& trackId);
nsresult RemoveRemoteTrack(const std::string& streamId,
const std::string& trackId);
void GetTransmitPipelinesMatching(
dom::MediaStreamTrack* aTrack,
nsTArray<RefPtr<MediaPipeline>>* aPipelines);
// Get a specific local stream
uint32_t LocalStreamsLength()
{
return mLocalSourceStreams.Length();
}
LocalSourceStreamInfo* GetLocalStreamByIndex(int index);
LocalSourceStreamInfo* GetLocalStreamById(const std::string& id);
LocalSourceStreamInfo* GetLocalStreamByTrackId(const std::string& id);
void GetReceivePipelinesMatching(
dom::MediaStreamTrack* aTrack,
nsTArray<RefPtr<MediaPipeline>>* aPipelines);
// Get a specific remote stream
uint32_t RemoteStreamsLength()
{
return mRemoteSourceStreams.Length();
}
nsresult AddRIDExtension(dom::MediaStreamTrack& aRecvTrack,
unsigned short aExtensionId);
RemoteSourceStreamInfo* GetRemoteStreamByIndex(size_t index);
RemoteSourceStreamInfo* GetRemoteStreamById(const std::string& id);
RemoteSourceStreamInfo* GetRemoteStreamByTrackId(const std::string& id);
// Add a remote stream.
nsresult AddRemoteStream(RefPtr<RemoteSourceStreamInfo> aInfo);
nsresult ReplaceTrack(const std::string& aOldStreamId,
const std::string& aOldTrackId,
dom::MediaStreamTrack& aNewTrack,
const std::string& aNewStreamId,
const std::string& aNewTrackId);
nsresult AddRIDFilter(dom::MediaStreamTrack& aRecvTrack,
const nsAString& aRid);
// In cases where the peer isn't yet identified, we disable the pipeline (not
// the stream, that would potentially affect others), so that it sends
// black/silence. Once the peer is identified, re-enable those streams.
// aTrack will be set if this update came from a principal change on aTrack.
// TODO: Move to PeerConnectionImpl
void UpdateSinkIdentity_m(dom::MediaStreamTrack* aTrack,
nsIPrincipal* aPrincipal,
const PeerIdentity* aSinkIdentity);
@ -366,6 +170,13 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
return mTransportFlows[index_inner];
}
// Used by PCImpl in a couple of places. Might be good to move that code in
// here.
std::vector<RefPtr<TransceiverImpl>>& GetTransceivers()
{
return mTransceivers;
}
// Add a transport flow
void AddTransportFlow(int aIndex, bool aRtcp,
const RefPtr<TransportFlow> &aFlow);
@ -376,45 +187,6 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
static void DtlsConnected_m(const std::string& aParentHandle,
bool aPrivacyRequested);
RefPtr<AudioSessionConduit> GetAudioConduit(size_t level) {
auto it = mConduits.find(level);
if (it == mConduits.end()) {
return nullptr;
}
if (it->second.first) {
MOZ_ASSERT(false, "In GetAudioConduit, we found a video conduit!");
return nullptr;
}
return RefPtr<AudioSessionConduit>(
static_cast<AudioSessionConduit*>(it->second.second.get()));
}
RefPtr<VideoSessionConduit> GetVideoConduit(size_t level) {
auto it = mConduits.find(level);
if (it == mConduits.end()) {
return nullptr;
}
if (!it->second.first) {
MOZ_ASSERT(false, "In GetVideoConduit, we found an audio conduit!");
return nullptr;
}
return RefPtr<VideoSessionConduit>(
static_cast<VideoSessionConduit*>(it->second.second.get()));
}
void AddVideoConduit(size_t level, const RefPtr<VideoSessionConduit> &aConduit) {
mConduits[level] = std::make_pair(true, aConduit);
}
// Add a conduit
void AddAudioConduit(size_t level, const RefPtr<AudioSessionConduit> &aConduit) {
mConduits[level] = std::make_pair(false, aConduit);
}
// ICE state signals
sigslot::signal2<NrIceCtx*, NrIceCtx::GatheringState>
SignalIceGatheringStateChange;
@ -429,6 +201,7 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
sigslot::signal1<uint16_t>
SignalEndOfLocalCandidates;
// TODO: Move to PeerConnectionImpl
RefPtr<WebRtcCallWrapper> mCall;
private:
@ -478,6 +251,10 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
const std::string& aPassword,
const std::vector<std::string>& aCandidateList);
void RemoveTransportsAtOrAfter_s(size_t aMLine);
nsresult UpdateTransportFlows(const JsepTransceiver& transceiver);
nsresult UpdateTransportFlow(size_t aLevel,
bool aIsRtcp,
const JsepTransport& aTransport);
void GatherIfReady();
void FlushIceCtxOperationQueueIfReady();
@ -544,15 +321,7 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
std::string mParentHandle;
std::string mParentName;
// A list of streams returned from GetUserMedia
// This is only accessed on the main thread (with one special exception)
nsTArray<RefPtr<LocalSourceStreamInfo> > mLocalSourceStreams;
// A list of streams provided by the other side
// This is only accessed on the main thread (with one special exception)
nsTArray<RefPtr<RemoteSourceStreamInfo> > mRemoteSourceStreams;
std::map<size_t, std::pair<bool, RefPtr<MediaSessionConduit>>> mConduits;
std::vector<RefPtr<TransceiverImpl>> mTransceivers;
// ICE objects
RefPtr<NrIceCtxHandler> mIceCtxHdlr;

Просмотреть файл

@ -0,0 +1,56 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef _REMOTE_TRACK_SOURCE_H_
#define _REMOTE_TRACK_SOURCE_H_

#include "MediaStreamTrack.h"
#include "MediaStreamError.h"

namespace mozilla {

// MediaStreamTrackSource backing a track received from the remote peer.
// Constraints cannot be applied to a remote source, so ApplyConstraints
// always rejects; the principal can be swapped later via SetPrincipal.
class RemoteTrackSource : public dom::MediaStreamTrackSource
{
public:
  explicit RemoteTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
    : dom::MediaStreamTrackSource(aPrincipal, aLabel) {}

  dom::MediaSourceEnum GetMediaSource() const override
  {
    // Remote tracks don't map to a capture device; report "Other".
    return dom::MediaSourceEnum::Other;
  }

  already_AddRefed<PledgeVoid>
  ApplyConstraints(nsPIDOMWindowInner* aWindow,
                   const dom::MediaTrackConstraints& aConstraints,
                   dom::CallerType aCallerType) override
  {
    // Constraints are meaningless on the receive side; reject immediately
    // with OverconstrainedError as a web-visible signal.
    RefPtr<PledgeVoid> p = new PledgeVoid();
    p->Reject(
        new dom::MediaStreamError(aWindow,
                                  NS_LITERAL_STRING("OverconstrainedError"),
                                  NS_LITERAL_STRING("")));
    return p.forget();
  }

  void Stop() override
  {
    // XXX (Bug 1314270): Implement rejection logic if necessary when we have
    // clarity in the spec.
  }

  // Replace the source's principal and broadcast the change to tracks.
  void SetPrincipal(nsIPrincipal* aPrincipal)
  {
    mPrincipal = aPrincipal;
    PrincipalChanged();
  }

protected:
  virtual ~RemoteTrackSource() {}
};

} // namespace mozilla

#endif // _REMOTE_TRACK_SOURCE_H_

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,148 @@
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef _TRANSCEIVERIMPL_H_
#define _TRANSCEIVERIMPL_H_

#include <string>
#include "mozilla/RefPtr.h"
#include "nsCOMPtr.h"
#include "nsIEventTarget.h"
#include "nsTArray.h"
#include "DOMMediaStream.h"
#include "mozilla/OwningNonNull.h"
#include "mozilla/dom/MediaStreamTrack.h"
#include "ErrorList.h"
#include "mtransport/transportflow.h"
#include "signaling/src/jsep/JsepTransceiver.h"

class nsIPrincipal;

namespace mozilla {
class PeerIdentity;
class PeerConnectionMedia;
class JsepTransceiver;
class MediaSessionConduit;
class VideoSessionConduit;
class MediaPipelineReceive;
class MediaPipelineTransmit;
class MediaPipeline;
class MediaPipelineFilter;
class WebRtcCallWrapper;
class JsepTrackNegotiatedDetails;
namespace dom {
class RTCRtpTransceiver;
}

/**
 * This is what ties all the various pieces that make up a transceiver
 * together. This includes:
 * DOMMediaStream, MediaStreamTrack, SourceMediaStream for rendering and capture
 * TransportFlow for RTP transmission/reception
 * Audio/VideoConduit for feeding RTP/RTCP into webrtc.org for decoding, and
 * feeding audio/video frames into webrtc.org for encoding into RTP/RTCP.
 */
class TransceiverImpl : public nsISupports {
public:
  /**
   * |aReceiveStream| is always set; this holds even if the remote end has not
   * negotiated one for this transceiver. |aSendTrack| might or might not be
   * set.
   */
  TransceiverImpl(const std::string& aPCHandle,
                  JsepTransceiver* aJsepTransceiver,
                  nsIEventTarget* aMainThread,
                  nsIEventTarget* aStsThread,
                  DOMMediaStream& aReceiveStream,
                  dom::MediaStreamTrack* aSendTrack,
                  WebRtcCallWrapper* aCallWrapper);

  // Swap the outgoing track (may be null to clear it).
  nsresult UpdateSendTrack(dom::MediaStreamTrack* aSendTrack);

  // Propagate principal / peer-identity constraints to the send side.
  nsresult UpdateSinkIdentity(dom::MediaStreamTrack* aTrack,
                              nsIPrincipal* aPrincipal,
                              const PeerIdentity* aSinkIdentity);

  // Pull the (possibly updated) transport flows from |aTransportManager|.
  nsresult UpdateTransport(PeerConnectionMedia& aTransportManager);

  nsresult UpdateConduit();

  nsresult UpdatePrincipal(nsIPrincipal* aPrincipal);

  // TODO: We probably need to de-Sync when transceivers are stopped.
  nsresult SyncWithMatchingVideoConduits(
      std::vector<RefPtr<TransceiverImpl>>& transceivers);

  // Main-thread teardown (_m suffix follows the file's thread convention).
  void Shutdown_m();

  bool ConduitHasPluginID(uint64_t aPluginID);

  bool HasSendTrack(const dom::MediaStreamTrack* aSendTrack) const;

  // This is so PCImpl can unregister from PrincipalChanged callbacks; maybe we
  // should have TransceiverImpl handle these callbacks instead? It would need
  // to be able to get a ref to PCImpl though.
  RefPtr<dom::MediaStreamTrack> GetSendTrack()
  {
    return mSendTrack;
  }

  // for webidl
  bool WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto,
                  JS::MutableHandle<JSObject*> aReflector);
  already_AddRefed<dom::MediaStreamTrack> GetReceiveTrack();
  // Keep the JS-visible RTCRtpTransceiver and this impl in agreement.
  void SyncWithJS(dom::RTCRtpTransceiver& aJsTransceiver, ErrorResult& aRv);
  void InsertDTMFTone(int tone, uint32_t duration);

  bool HasReceiveTrack(const dom::MediaStreamTrack* aReceiveTrack) const;

  // TODO: These are for stats; try to find a cleaner way.
  RefPtr<MediaPipeline> GetSendPipeline();
  RefPtr<MediaPipeline> GetReceivePipeline();

  void AddRIDExtension(unsigned short aExtensionId);
  void AddRIDFilter(const nsAString& aRid);

  bool IsVideo() const;

  NS_DECL_THREADSAFE_ISUPPORTS

private:
  virtual ~TransceiverImpl();
  void InitAudio();
  void InitVideo();
  nsresult UpdateAudioConduit();
  nsresult UpdateVideoConduit();
  nsresult ConfigureVideoCodecMode(VideoSessionConduit& aConduit);
  // This will eventually update audio extmap too
  void UpdateVideoExtmap(const JsepTrackNegotiatedDetails& aDetails,
                         bool aSending);
  void StartReceiveStream();
  void Stop();

  const std::string mPCHandle;
  RefPtr<JsepTransceiver> mJsepTransceiver;
  std::string mMid;
  bool mHaveStartedReceiving;
  bool mHaveSetupTransport;
  nsCOMPtr<nsIEventTarget> mMainThread;
  nsCOMPtr<nsIEventTarget> mStsThread;
  RefPtr<DOMMediaStream> mReceiveStream;
  RefPtr<dom::MediaStreamTrack> mSendTrack;
  // state for webrtc.org that is shared between all transceivers
  RefPtr<WebRtcCallWrapper> mCallWrapper;
  RefPtr<TransportFlow> mRtpFlow;
  RefPtr<TransportFlow> mRtcpFlow;  // unused when RTCP is muxed — confirm in .cpp
  RefPtr<MediaSessionConduit> mConduit;
  RefPtr<MediaPipelineReceive> mReceivePipeline;
  RefPtr<MediaPipelineTransmit> mTransmitPipeline;
};

} // namespace mozilla

#endif // _TRANSCEIVERIMPL_H_

Просмотреть файл

@ -22,12 +22,11 @@ LOCAL_INCLUDES += [
]
UNIFIED_SOURCES += [
'MediaPipelineFactory.cpp',
'MediaStreamList.cpp',
'PacketDumper.cpp',
'PeerConnectionCtx.cpp',
'PeerConnectionImpl.cpp',
'PeerConnectionMedia.cpp',
'TransceiverImpl.cpp',
'WebrtcGlobalInformation.cpp',
]