Bug 1130534: Use a single bidirectional media conduit that MediaPipelines can attach/detach at will. r=jesup

--HG--
extra : rebase_source : 202a83e513d88bc14f1be2c5b438998461ff4a50
This commit is contained in:
Byron Campen [:bwc] 2015-02-10 10:11:24 -08:00
Родитель 248d6e6d79
Коммит 794f966f80
15 изменённых файлов: 671 добавлений и 624 удалений

Просмотреть файл

@ -41,13 +41,13 @@ const unsigned int WebrtcAudioConduit::CODEC_PLNAME_SIZE = 32;
/**
* Factory Method for AudioConduit
*/
mozilla::RefPtr<AudioSessionConduit> AudioSessionConduit::Create(AudioSessionConduit *aOther)
mozilla::RefPtr<AudioSessionConduit> AudioSessionConduit::Create()
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
WebrtcAudioConduit* obj = new WebrtcAudioConduit();
if(obj->Init(static_cast<WebrtcAudioConduit*>(aOther)) != kMediaConduitNoError)
if(obj->Init() != kMediaConduitNoError)
{
CSFLogError(logTag, "%s AudioConduit Init Failed ", __FUNCTION__);
delete obj;
@ -74,55 +74,39 @@ WebrtcAudioConduit::~WebrtcAudioConduit()
// The first one of a pair to be deleted shuts down media for both
if(mPtrVoEXmedia)
{
if (!mShutDown) {
mPtrVoEXmedia->SetExternalRecordingStatus(false);
mPtrVoEXmedia->SetExternalPlayoutStatus(false);
}
mPtrVoEXmedia->SetExternalRecordingStatus(false);
mPtrVoEXmedia->SetExternalPlayoutStatus(false);
}
//Deal with the transport
if(mPtrVoENetwork)
{
if (!mShutDown) {
mPtrVoENetwork->DeRegisterExternalTransport(mChannel);
}
mPtrVoENetwork->DeRegisterExternalTransport(mChannel);
}
if(mPtrVoEBase)
{
if (!mShutDown) {
mPtrVoEBase->StopPlayout(mChannel);
mPtrVoEBase->StopSend(mChannel);
mPtrVoEBase->StopReceive(mChannel);
mPtrVoEBase->DeleteChannel(mChannel);
mPtrVoEBase->Terminate();
}
mPtrVoEBase->StopPlayout(mChannel);
mPtrVoEBase->StopSend(mChannel);
mPtrVoEBase->StopReceive(mChannel);
mPtrVoEBase->DeleteChannel(mChannel);
mPtrVoEBase->Terminate();
}
if (mOtherDirection)
{
// mOtherDirection owns these now!
mOtherDirection->mOtherDirection = nullptr;
// let other side we terminated the channel
mOtherDirection->mShutDown = true;
mVoiceEngine = nullptr;
} else {
// We shouldn't delete the VoiceEngine until all these are released!
// And we can't use a Scoped ptr, since the order is arbitrary
mPtrVoENetwork = nullptr;
mPtrVoEBase = nullptr;
mPtrVoECodec = nullptr;
mPtrVoEXmedia = nullptr;
mPtrVoEProcessing = nullptr;
mPtrVoEVideoSync = nullptr;
mPtrVoERTP_RTCP = nullptr;
mPtrRTP = nullptr;
// We shouldn't delete the VoiceEngine until all these are released!
// And we can't use a Scoped ptr, since the order is arbitrary
mPtrVoENetwork = nullptr;
mPtrVoEBase = nullptr;
mPtrVoECodec = nullptr;
mPtrVoEXmedia = nullptr;
mPtrVoEProcessing = nullptr;
mPtrVoEVideoSync = nullptr;
mPtrVoERTP_RTCP = nullptr;
mPtrRTP = nullptr;
// only one opener can call Delete. Have it be the last to close.
if(mVoiceEngine)
{
webrtc::VoiceEngine::Delete(mVoiceEngine);
}
if(mVoiceEngine)
{
webrtc::VoiceEngine::Delete(mVoiceEngine);
}
}
@ -210,42 +194,32 @@ bool WebrtcAudioConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
/*
* WebRTCAudioConduit Implementation
*/
MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)
MediaConduitErrorCode WebrtcAudioConduit::Init()
{
CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other);
CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
if (other) {
MOZ_ASSERT(!other->mOtherDirection);
other->mOtherDirection = this;
mOtherDirection = other;
// only one can call ::Create()/GetVoiceEngine()
MOZ_ASSERT(other->mVoiceEngine);
mVoiceEngine = other->mVoiceEngine;
} else {
#ifdef MOZ_WIDGET_ANDROID
jobject context = jsjni_GetGlobalContextRef();
jobject context = jsjni_GetGlobalContextRef();
// get the JVM
JavaVM *jvm = jsjni_GetVM();
JNIEnv* jenv = jsjni_GetJNIForThread();
// get the JVM
JavaVM *jvm = jsjni_GetVM();
JNIEnv* jenv = jsjni_GetJNIForThread();
if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) {
CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif
// Per WebRTC APIs below function calls return nullptr on failure
if(!(mVoiceEngine = webrtc::VoiceEngine::Create()))
{
CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__);
if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) {
CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif
EnableWebRtcLog();
// Per WebRTC APIs below function calls return nullptr on failure
if(!(mVoiceEngine = webrtc::VoiceEngine::Create()))
{
CSFLogError(logTag, "%s Unable to create voice engine", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
EnableWebRtcLog();
if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEBase", __FUNCTION__);
@ -292,61 +266,65 @@ MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)
return kMediaConduitSessionNotInited;
}
if (other) {
mChannel = other->mChannel;
} else {
// init the engine with our audio device layer
if(mPtrVoEBase->Init() == -1)
{
CSFLogError(logTag, "%s VoiceEngine Base Not Initialized", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if( (mChannel = mPtrVoEBase->CreateChannel()) == -1)
{
CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__);
return kMediaConduitChannelError;
}
CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel);
if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
return kMediaConduitTransportRegistrationFail;
}
if(mPtrVoEXmedia->SetExternalRecordingStatus(true) == -1)
{
CSFLogError(logTag, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalPlayoutError;
}
if(mPtrVoEXmedia->SetExternalPlayoutStatus(true) == -1)
{
CSFLogError(logTag, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalRecordingError;
}
CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
// init the engine with our audio device layer
if(mPtrVoEBase->Init() == -1)
{
CSFLogError(logTag, "%s VoiceEngine Base Not Initialized", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if( (mChannel = mPtrVoEBase->CreateChannel()) == -1)
{
CSFLogError(logTag, "%s VoiceEngine Channel creation failed",__FUNCTION__);
return kMediaConduitChannelError;
}
CSFLogDebug(logTag, "%s Channel Created %d ",__FUNCTION__, mChannel);
if(mPtrVoENetwork->RegisterExternalTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s VoiceEngine, External Transport Failed",__FUNCTION__);
return kMediaConduitTransportRegistrationFail;
}
if(mPtrVoEXmedia->SetExternalRecordingStatus(true) == -1)
{
CSFLogError(logTag, "%s SetExternalRecordingStatus Failed %d",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalPlayoutError;
}
if(mPtrVoEXmedia->SetExternalPlayoutStatus(true) == -1)
{
CSFLogError(logTag, "%s SetExternalPlayoutStatus Failed %d ",__FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitExternalRecordingError;
}
CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
return kMediaConduitNoError;
}
// AudioSessionConduit Implementation
MediaConduitErrorCode
WebrtcAudioConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport)
WebrtcAudioConduit::SetTransmitterTransport(mozilla::RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
if(!aTransport)
{
CSFLogError(logTag, "%s NULL Transport", __FUNCTION__);
return kMediaConduitInvalidTransport;
}
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mTransport = aTransport;
mTransmitterTransport = aTransport;
return kMediaConduitNoError;
}
// Attaches (or replaces) the receiver-side transport used for sending RTCP.
// Unlike SetTransmitterTransport, a null aTransport is accepted here —
// presumably to unset the endpoint; confirm against the interface contract.
// Takes mTransportMonitor so swaps are safe against concurrent use of the
// transport on the webrtc.org callback threads.
// @param aTransport concrete TransportInterface implementation (may be null)
// @return kMediaConduitNoError always
MediaConduitErrorCode
WebrtcAudioConduit::SetReceiverTransport(mozilla::RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mReceiverTransport = aTransport;
return kMediaConduitNoError;
}
@ -364,20 +342,11 @@ WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
return condError;
}
//are we transmitting already, stop and apply the send codec
if(mEngineTransmitting)
{
CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
if(mPtrVoEBase->StopSend(mChannel) == -1)
{
CSFLogError(logTag, "%s StopSend() Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
condError = StopTransmitting();
if (condError != kMediaConduitNoError) {
return condError;
}
mEngineTransmitting = false;
if(!CodecConfigToWebRTCCodec(codecConfig,cinst))
{
CSFLogError(logTag,"%s CodecConfig to WebRTC Codec Failed ",__FUNCTION__);
@ -413,12 +382,9 @@ WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
}
#endif
//Let's Send Transport State-machine on the Engine
if(mPtrVoEBase->StartSend(mChannel) == -1)
{
error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s StartSend failed %d", __FUNCTION__, error);
return kMediaConduitUnknownError;
condError = StartTransmitting();
if (condError != kMediaConduitNoError) {
return condError;
}
//Copy the applied config for future reference.
@ -431,7 +397,6 @@ WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
codecConfig->mChannels,
codecConfig->mRate);
mEngineTransmitting = true;
return kMediaConduitNoError;
}
@ -446,25 +411,11 @@ WebrtcAudioConduit::ConfigureRecvMediaCodecs(
// Are we receiving already? If so, stop receiving and playout
// since we can't apply new recv codec when the engine is playing.
if(mEngineReceiving)
{
CSFLogDebug(logTag, "%s Engine Already Receiving. Attemping to Stop ", __FUNCTION__);
// AudioEngine doesn't fail fatally on stopping reception. Ref:voe_errors.h.
// hence we need not be strict in failing here on errors
mPtrVoEBase->StopReceive(mChannel);
CSFLogDebug(logTag, "%s Attemping to Stop playout ", __FUNCTION__);
if(mPtrVoEBase->StopPlayout(mChannel) == -1)
{
if( mPtrVoEBase->LastError() == VE_CANNOT_STOP_PLAYOUT)
{
CSFLogDebug(logTag, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
return kMediaConduitPlayoutError;
}
}
condError = StopReceiving();
if (condError != kMediaConduitNoError) {
return condError;
}
mEngineReceiving = false;
if(codecConfigList.empty())
{
CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
@ -517,25 +468,11 @@ WebrtcAudioConduit::ConfigureRecvMediaCodecs(
}
//If we are here, atleast one codec should have been set
if(mPtrVoEBase->StartReceive(mChannel) == -1)
{
error = mPtrVoEBase->LastError();
CSFLogError(logTag , "%s StartReceive Failed %d ",__FUNCTION__, error);
if(error == VE_RECV_SOCKET_ERROR)
{
return kMediaConduitSocketError;
}
return kMediaConduitUnknownError;
condError = StartReceiving();
if (condError != kMediaConduitNoError) {
return condError;
}
if(mPtrVoEBase->StartPlayout(mChannel) == -1)
{
CSFLogError(logTag, "%s Starting playout Failed", __FUNCTION__);
return kMediaConduitPlayoutError;
}
//we should be good here for setting this.
mEngineReceiving = true;
DumpCodecDB();
return kMediaConduitNoError;
}
@ -788,64 +725,145 @@ WebrtcAudioConduit::ReceivedRTCPPacket(const void *data, int len)
return kMediaConduitNoError;
}
//WebRTC::RTP Callback Implementation
int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
MediaConduitErrorCode
WebrtcAudioConduit::StopTransmitting()
{
CSFLogDebug(logTag, "%s : channel %d %s", __FUNCTION__, channel,
(mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");
if (mEngineReceiving)
if(mEngineTransmitting)
{
if (mOtherDirection)
CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
if(mPtrVoEBase->StopSend(mChannel) == -1)
{
return mOtherDirection->SendPacket(channel, data, len);
}
CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d",
__FUNCTION__, channel);
return -1;
} else {
#ifdef MOZILLA_INTERNAL_API
if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
if (mProcessing.Length() > 0) {
TimeStamp started = mProcessing[0].mTimeStamp;
mProcessing.RemoveElementAt(0);
mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes
TimeDuration t = TimeStamp::Now() - started;
int64_t delta = t.ToMilliseconds();
LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
}
}
#endif
if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
return -1;
CSFLogError(logTag, "%s StopSend() Failed %d ", __FUNCTION__,
mPtrVoEBase->LastError());
return kMediaConduitUnknownError;
}
mEngineTransmitting = false;
}
return kMediaConduitNoError;
}
int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len)
// Starts the VoiceEngine send-side state machine for our channel.
// Idempotent: the mEngineTransmitting flag guards against re-starting when
// the engine is already sending, so callers may invoke this unconditionally.
// @return kMediaConduitNoError on success (or if already transmitting);
//         kMediaConduitUnknownError if VoEBase::StartSend() fails.
MediaConduitErrorCode
WebrtcAudioConduit::StartTransmitting()
{
if (!mEngineTransmitting) {
//Let's Send Transport State-machine on the Engine
if(mPtrVoEBase->StartSend(mChannel) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag, "%s StartSend failed %d", __FUNCTION__, error);
return kMediaConduitUnknownError;
}
// Only flip the flag once the engine has actually started sending.
mEngineTransmitting = true;
}
return kMediaConduitNoError;
}
// Stops reception and playout on our channel, if currently receiving.
// Idempotent: does nothing when mEngineReceiving is already false.
// StopReceive() failures are deliberately ignored (see comment below);
// StopPlayout() failure is only fatal when the engine reports
// VE_CANNOT_STOP_PLAYOUT.
// @return kMediaConduitNoError on success or no-op;
//         kMediaConduitPlayoutError if playout could not be stopped.
MediaConduitErrorCode
WebrtcAudioConduit::StopReceiving()
{
if(mEngineReceiving)
{
CSFLogDebug(logTag, "%s Engine Already Receiving. Attemping to Stop ", __FUNCTION__);
// AudioEngine doesn't fail fatally on stopping reception. Ref:voe_errors.h.
// hence we need not be strict in failing here on errors
mPtrVoEBase->StopReceive(mChannel);
CSFLogDebug(logTag, "%s Attemping to Stop playout ", __FUNCTION__);
if(mPtrVoEBase->StopPlayout(mChannel) == -1)
{
if( mPtrVoEBase->LastError() == VE_CANNOT_STOP_PLAYOUT)
{
CSFLogDebug(logTag, "%s Stop-Playout Failed %d", __FUNCTION__, mPtrVoEBase->LastError());
// NOTE(review): on this early return mEngineReceiving stays true even
// though StopReceive() already ran — confirm this is intentional.
return kMediaConduitPlayoutError;
}
}
mEngineReceiving = false;
}
return kMediaConduitNoError;
}
// Starts reception and playout on our channel, if not already receiving.
// Idempotent: the mEngineReceiving flag guards against double-starts.
// @return kMediaConduitNoError on success (or if already receiving);
//         kMediaConduitSocketError if StartReceive() fails with
//         VE_RECV_SOCKET_ERROR; kMediaConduitUnknownError for other
//         StartReceive() failures; kMediaConduitPlayoutError if
//         StartPlayout() fails.
MediaConduitErrorCode
WebrtcAudioConduit::StartReceiving()
{
if (!mEngineReceiving) {
if(mPtrVoEBase->StartReceive(mChannel) == -1)
{
int error = mPtrVoEBase->LastError();
CSFLogError(logTag , "%s StartReceive Failed %d ",__FUNCTION__, error);
// Map the engine's socket error to the conduit-level error code.
if(error == VE_RECV_SOCKET_ERROR)
{
return kMediaConduitSocketError;
}
return kMediaConduitUnknownError;
}
if(mPtrVoEBase->StartPlayout(mChannel) == -1)
{
CSFLogError(logTag, "%s Starting playout Failed", __FUNCTION__);
return kMediaConduitPlayoutError;
}
// Only set once both receive and playout are up.
mEngineReceiving = true;
}
return kMediaConduitNoError;
}
//WebRTC::RTP Callback Implementation
// Called on AudioGUM or MSG thread
int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
{
CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, channel);
if (mEngineTransmitting)
{
if (mOtherDirection)
{
return mOtherDirection->SendRTCPPacket(channel, data, len);
#ifdef MOZILLA_INTERNAL_API
if (PR_LOG_TEST(GetLatencyLog(), PR_LOG_DEBUG)) {
if (mProcessing.Length() > 0) {
TimeStamp started = mProcessing[0].mTimeStamp;
mProcessing.RemoveElementAt(0);
mProcessing.RemoveElementAt(0); // 20ms packetization! Could automate this by watching sizes
TimeDuration t = TimeStamp::Now() - started;
int64_t delta = t.ToMilliseconds();
LogTime(AsyncLatencyLogger::AudioSendRTP, ((uint64_t) this), delta);
}
}
#endif
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mTransmitterTransport &&
(mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
return -1;
}
}
// Called on WebRTC Process thread and perhaps others
int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len)
{
CSFLogDebug(logTag, "%s : channel %d , len %d, first rtcp = %u ",
__FUNCTION__,
channel,
len,
static_cast<unsigned>(((uint8_t *) data)[1]));
// We come here if we have only one pipeline/conduit setup,
// such as for unidirectional streams.
// We also end up here if we are receiving
if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mReceiverTransport &&
mReceiverTransport->SendRtcpPacket(data, len) == NS_OK)
{
// Might be a sender report, might be a receiver report, we don't know.
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
return len;
} else if(mTransmitterTransport &&
(mTransmitterTransport->SendRtcpPacket(data, len) == NS_OK)) {
CSFLogDebug(logTag, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
return -1;
@ -1011,7 +1029,6 @@ WebrtcAudioConduit::ValidateCodecConfig(const AudioCodecConfig* codecInfo,
if(codecAppliedAlready)
{
CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str());
return kMediaConduitCodecInUse;
}
return kMediaConduitNoError;
}

Просмотреть файл

@ -64,6 +64,11 @@ public:
*/
virtual MediaConduitErrorCode ReceivedRTCPPacket(const void *data, int len) MOZ_OVERRIDE;
virtual MediaConduitErrorCode StopTransmitting() MOZ_OVERRIDE;
virtual MediaConduitErrorCode StartTransmitting() MOZ_OVERRIDE;
virtual MediaConduitErrorCode StopReceiving() MOZ_OVERRIDE;
virtual MediaConduitErrorCode StartReceiving() MOZ_OVERRIDE;
/**
* Function to configure send codec for the audio session
* @param sendSessionConfig: CodecConfiguration
@ -94,7 +99,10 @@ public:
* Register External Transport to this Conduit. RTP and RTCP frames from the VoiceEngine
* shall be passed to the registered transport for transporting externally.
*/
virtual MediaConduitErrorCode AttachTransport(mozilla::RefPtr<TransportInterface> aTransport) MOZ_OVERRIDE;
virtual MediaConduitErrorCode SetTransmitterTransport(mozilla::RefPtr<TransportInterface> aTransport) MOZ_OVERRIDE;
virtual MediaConduitErrorCode SetReceiverTransport(mozilla::RefPtr<TransportInterface> aTransport) MOZ_OVERRIDE;
/**
* Function to deliver externally captured audio sample for encoding and transport
* @param audioData [in]: Pointer to array containing a frame of audio
@ -154,10 +162,10 @@ public:
virtual uint64_t CodecPluginID() MOZ_OVERRIDE { return 0; }
WebrtcAudioConduit():
mOtherDirection(nullptr),
mShutDown(false),
mVoiceEngine(nullptr),
mTransport(nullptr),
mTransportMonitor("WebrtcAudioConduit"),
mTransmitterTransport(nullptr),
mReceiverTransport(nullptr),
mEngineTransmitting(false),
mEngineReceiving(false),
mChannel(-1),
@ -173,7 +181,7 @@ public:
virtual ~WebrtcAudioConduit();
MediaConduitErrorCode Init(WebrtcAudioConduit *other);
MediaConduitErrorCode Init();
int GetChannel() { return mChannel; }
webrtc::VoiceEngine* GetVoiceEngine() { return mVoiceEngine; }
@ -242,17 +250,10 @@ private:
//Utility function to dump recv codec database
void DumpCodecDB() const;
// The two sides of a send/receive pair of conduits each keep a pointer to the other.
// The also share a single VoiceEngine and mChannel. Shutdown must be coordinated
// carefully to avoid double-freeing or accessing after one frees.
WebrtcAudioConduit* mOtherDirection;
// Other side has shut down our channel and related items already
bool mShutDown;
// These are shared by both directions. They're released by the last
// conduit to die
webrtc::VoiceEngine* mVoiceEngine;
mozilla::RefPtr<TransportInterface> mTransport;
mozilla::ReentrantMonitor mTransportMonitor;
mozilla::RefPtr<TransportInterface> mTransmitterTransport;
mozilla::RefPtr<TransportInterface> mReceiverTransport;
ScopedCustomReleasePtr<webrtc::VoENetwork> mPtrVoENetwork;
ScopedCustomReleasePtr<webrtc::VoEBase> mPtrVoEBase;
ScopedCustomReleasePtr<webrtc::VoECodec> mPtrVoECodec;
@ -262,8 +263,8 @@ private:
ScopedCustomReleasePtr<webrtc::VoERTP_RTCP> mPtrVoERTP_RTCP;
ScopedCustomReleasePtr<webrtc::VoERTP_RTCP> mPtrRTP;
//engine states of our interets
bool mEngineTransmitting; // If true => VoiceEngine Send-subsystem is up
bool mEngineReceiving; // If true => VoiceEngine Receive-subsystem is up
mozilla::Atomic<bool> mEngineTransmitting; // If true => VoiceEngine Send-subsystem is up
mozilla::Atomic<bool> mEngineReceiving; // If true => VoiceEngine Receive-subsystem is up
// and playout is enabled
// Keep track of each inserted RTP block and the time it was inserted
// so we can estimate the clock time for a specific TimeStamp coming out

Просмотреть файл

@ -152,14 +152,34 @@ public:
*/
virtual MediaConduitErrorCode ReceivedRTCPPacket(const void *data, int len) = 0;
virtual MediaConduitErrorCode StopTransmitting() = 0;
virtual MediaConduitErrorCode StartTransmitting() = 0;
virtual MediaConduitErrorCode StopReceiving() = 0;
virtual MediaConduitErrorCode StartReceiving() = 0;
/**
* Function to attach Transport end-point of the Media conduit.
* Function to attach transmitter transport end-point of the Media conduit.
* @param aTransport: Reference to the concrete transport implementation
* When nullptr, unsets the transmitter transport endpoint.
* Note: Multiple invocations of this call replace the existing transport
* with the new one.
* Note: This transport is used for RTP, and RTCP if no receiver transport is
* set. In the future, we should ensure that RTCP sender reports use this
* regardless of whether the receiver transport is set.
*/
virtual MediaConduitErrorCode AttachTransport(RefPtr<TransportInterface> aTransport) = 0;
virtual MediaConduitErrorCode SetTransmitterTransport(RefPtr<TransportInterface> aTransport) = 0;
/**
* Function to attach receiver transport end-point of the Media conduit.
* @param aTransport: Reference to the concrete transport implementation
* When nullptr, unsets the receiver transport endpoint.
* Note: Multiple invocations of this call replace the existing transport
* with the new one.
* Note: This transport is used for RTCP.
* Note: In the future, we should avoid using this for RTCP sender reports.
*/
virtual MediaConduitErrorCode SetReceiverTransport(RefPtr<TransportInterface> aTransport) = 0;
virtual bool SetLocalSSRC(unsigned int ssrc) = 0;
virtual bool GetLocalSSRC(unsigned int* ssrc) = 0;
@ -234,8 +254,7 @@ public:
* return: Concrete VideoSessionConduitObject or nullptr in the case
* of failure
*/
static RefPtr<VideoSessionConduit> Create(VideoSessionConduit *aOther,
bool receiving);
static RefPtr<VideoSessionConduit> Create();
enum FrameRequestType
{
@ -360,7 +379,7 @@ public:
* return: Concrete AudioSessionConduitObject or nullptr in the case
* of failure
*/
static mozilla::RefPtr<AudioSessionConduit> Create(AudioSessionConduit *aOther);
static mozilla::RefPtr<AudioSessionConduit> Create();
virtual ~AudioSessionConduit() {}

Просмотреть файл

@ -43,14 +43,13 @@ const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
* Factory Method for VideoConduit
*/
mozilla::RefPtr<VideoSessionConduit>
VideoSessionConduit::Create(VideoSessionConduit *aOther,
bool receiving)
VideoSessionConduit::Create()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
CSFLogDebug(logTag, "%s ", __FUNCTION__);
WebrtcVideoConduit* obj = new WebrtcVideoConduit();
if(obj->Init(static_cast<WebrtcVideoConduit*>(aOther), receiving) != kMediaConduitNoError)
if(obj->Init() != kMediaConduitNoError)
{
CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__);
delete obj;
@ -61,10 +60,10 @@ VideoSessionConduit::Create(VideoSessionConduit *aOther,
}
WebrtcVideoConduit::WebrtcVideoConduit():
mOtherDirection(nullptr),
mShutDown(false),
mVideoEngine(nullptr),
mTransport(nullptr),
mTransportMonitor("WebrtcVideoConduit"),
mTransmitterTransport(nullptr),
mReceiverTransport(nullptr),
mRenderer(nullptr),
mPtrExtCapture(nullptr),
mEngineTransmitting(false),
@ -77,6 +76,7 @@ WebrtcVideoConduit::WebrtcVideoConduit():
mReceivingWidth(640),
mReceivingHeight(480),
mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE),
mNumReceivingStreams(1),
mVideoLatencyTestEnable(false),
mVideoLatencyAvg(0),
mMinBitrate(200),
@ -101,13 +101,9 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
//Deal with External Capturer
if(mPtrViECapture)
{
if (!mShutDown) {
mPtrViECapture->DisconnectCaptureDevice(mCapId);
mPtrViECapture->ReleaseCaptureDevice(mCapId);
mPtrExtCapture = nullptr;
if (mOtherDirection)
mOtherDirection->mPtrExtCapture = nullptr;
}
mPtrViECapture->DisconnectCaptureDevice(mCapId);
mPtrViECapture->ReleaseCaptureDevice(mCapId);
mPtrExtCapture = nullptr;
}
if (mPtrExtCodec) {
@ -118,60 +114,45 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
//Deal with External Renderer
if(mPtrViERender)
{
if (!mShutDown) {
if(mRenderer) {
mPtrViERender->StopRender(mChannel);
}
mPtrViERender->RemoveRenderer(mChannel);
if(mRenderer) {
mPtrViERender->StopRender(mChannel);
}
mPtrViERender->RemoveRenderer(mChannel);
}
//Deal with the transport
if(mPtrViENetwork)
{
if (!mShutDown) {
mPtrViENetwork->DeregisterSendTransport(mChannel);
}
mPtrViENetwork->DeregisterSendTransport(mChannel);
}
if(mPtrViEBase)
{
if (!mShutDown) {
mPtrViEBase->StopSend(mChannel);
mPtrViEBase->StopReceive(mChannel);
SyncTo(nullptr);
mPtrViEBase->DeleteChannel(mChannel);
}
mPtrViEBase->StopSend(mChannel);
mPtrViEBase->StopReceive(mChannel);
SyncTo(nullptr);
mPtrViEBase->DeleteChannel(mChannel);
}
if (mOtherDirection)
{
// mOtherDirection owns these now!
mOtherDirection->mOtherDirection = nullptr;
// let other side we terminated the channel
mOtherDirection->mShutDown = true;
mVideoEngine = nullptr;
} else {
// mVideoCodecStat has a back-ptr to mPtrViECodec that must be released first
if (mVideoCodecStat) {
mVideoCodecStat->EndOfCallStats();
}
mVideoCodecStat = nullptr;
// We can't delete the VideoEngine until all these are released!
// And we can't use a Scoped ptr, since the order is arbitrary
mPtrViEBase = nullptr;
mPtrViECapture = nullptr;
mPtrViECodec = nullptr;
mPtrViENetwork = nullptr;
mPtrViERender = nullptr;
mPtrRTP = nullptr;
mPtrExtCodec = nullptr;
// mVideoCodecStat has a back-ptr to mPtrViECodec that must be released first
if (mVideoCodecStat) {
mVideoCodecStat->EndOfCallStats();
}
mVideoCodecStat = nullptr;
// We can't delete the VideoEngine until all these are released!
// And we can't use a Scoped ptr, since the order is arbitrary
mPtrViEBase = nullptr;
mPtrViECapture = nullptr;
mPtrViECodec = nullptr;
mPtrViENetwork = nullptr;
mPtrViERender = nullptr;
mPtrRTP = nullptr;
mPtrExtCodec = nullptr;
// only one opener can call Delete. Have it be the last to close.
if(mVideoEngine)
{
webrtc::VideoEngine::Delete(mVideoEngine);
}
// only one opener can call Delete. Have it be the last to close.
if(mVideoEngine)
{
webrtc::VideoEngine::Delete(mVideoEngine);
}
}
@ -288,10 +269,9 @@ bool WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
* Performs initialization of the MANDATORY components of the Video Engine
*/
MediaConduitErrorCode
WebrtcVideoConduit::Init(WebrtcVideoConduit *other,
bool receiving)
WebrtcVideoConduit::Init()
{
CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other);
CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
#ifdef MOZILLA_INTERNAL_API
// already know we must be on MainThread barring unit test weirdness
@ -328,36 +308,26 @@ WebrtcVideoConduit::Init(WebrtcVideoConduit *other,
}
#endif
if (other) {
MOZ_ASSERT(!other->mOtherDirection);
other->mOtherDirection = this;
mOtherDirection = other;
// only one can call ::Create()/GetVideoEngine()
MOZ_ASSERT(other->mVideoEngine);
mVideoEngine = other->mVideoEngine;
} else {
#ifdef MOZ_WIDGET_ANDROID
// get the JVM
JavaVM *jvm = jsjni_GetVM();
// get the JVM
JavaVM *jvm = jsjni_GetVM();
if (webrtc::VideoEngine::SetAndroidObjects(jvm) != 0) {
CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if (webrtc::VideoEngine::SetAndroidObjects(jvm) != 0) {
CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif
// Per WebRTC APIs below function calls return nullptr on failure
mVideoEngine = webrtc::VideoEngine::Create();
if(!mVideoEngine)
{
CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
EnableWebRtcLog();
// Per WebRTC APIs below function calls return nullptr on failure
mVideoEngine = webrtc::VideoEngine::Create();
if(!mVideoEngine)
{
CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
EnableWebRtcLog();
if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
{
CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__);
@ -408,71 +378,68 @@ WebrtcVideoConduit::Init(WebrtcVideoConduit *other,
return kMediaConduitSessionNotInited;
}
if (other) {
mChannel = other->mChannel;
mPtrExtCapture = other->mPtrExtCapture;
mCapId = other->mCapId;
} else {
CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
if(mPtrViEBase->Init() == -1)
{
CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitSessionNotInited;
}
if(mPtrViEBase->CreateChannel(mChannel) == -1)
{
CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitChannelError;
}
if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitTransportRegistrationFail;
}
if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
mPtrExtCapture) == -1)
{
CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
__FUNCTION__, mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
{
CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
__FUNCTION__,mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
// Set up some parameters, per juberti. Set MTU.
if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
{
CSFLogError(logTag, "%s MTU Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitMTUError;
}
// Turn on RTCP and loss feedback reporting.
if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
{
CSFLogError(logTag, "%s RTCPStatus Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitRTCPStatusError;
}
if(mPtrViEBase->Init() == -1)
{
CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitSessionNotInited;
}
if (receiving) {
if (mPtrViERender->AddRenderer(mChannel,
webrtc::kVideoI420,
(webrtc::ExternalRenderer*) this) == -1) {
CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
return kMediaConduitInvalidRenderer;
}
if(mPtrViEBase->CreateChannel(mChannel) == -1)
{
CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitChannelError;
}
if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitTransportRegistrationFail;
}
if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
mPtrExtCapture) == -1)
{
CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
__FUNCTION__, mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
{
CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
__FUNCTION__,mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
// Set up some parameters, per juberti. Set MTU.
if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
{
CSFLogError(logTag, "%s MTU Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitMTUError;
}
// Turn on RTCP and loss feedback reporting.
if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
{
CSFLogError(logTag, "%s RTCPStatus Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitRTCPStatusError;
}
if (mPtrViERender->AddRenderer(mChannel,
webrtc::kVideoI420,
(webrtc::ExternalRenderer*) this) == -1) {
CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
return kMediaConduitInvalidRenderer;
}
if (mLoadManager) {
mPtrViEBase->RegisterCpuOveruseObserver(mChannel, mLoadManager);
mPtrViEBase->SetLoadManager(mLoadManager);
}
CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
@ -491,17 +458,9 @@ WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
// NOTE: this means the VideoConduit will keep the AudioConduit alive!
} else if ((mOtherDirection && mOtherDirection->mSyncedTo) || mSyncedTo) {
mPtrViEBase->DisconnectAudioChannel(mChannel);
mPtrViEBase->SetVoiceEngine(nullptr);
}
// Now manage the shared sync reference (ugly)
if (mSyncedTo || !mOtherDirection ) {
mSyncedTo = aConduit;
} else {
mOtherDirection->mSyncedTo = aConduit;
}
mSyncedTo = aConduit;
}
MediaConduitErrorCode
@ -517,21 +476,28 @@ WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer
return kMediaConduitInvalidRenderer;
}
//Start Rendering if we haven't already
if(!mRenderer)
// This function is called only from main, so we only need to protect against
// modifying mRenderer while any webrtc.org code is trying to use it.
bool wasRendering;
{
mRenderer = aVideoRenderer; // must be done before StartRender()
ReentrantMonitorAutoEnter enter(mTransportMonitor);
wasRendering = !!mRenderer;
mRenderer = aVideoRenderer;
// Make sure the renderer knows the resolution
mRenderer->FrameSizeChange(mReceivingWidth,
mReceivingHeight,
mNumReceivingStreams);
}
if (!wasRendering) {
if(mPtrViERender->StartRender(mChannel) == -1)
{
CSFLogError(logTag, "%s Starting the Renderer Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
ReentrantMonitorAutoEnter enter(mTransportMonitor);
mRenderer = nullptr;
return kMediaConduitRendererFail;
}
} else {
//Assign the new renderer - overwrites if there is already one
mRenderer = aVideoRenderer;
}
return kMediaConduitNoError;
@ -540,24 +506,36 @@ WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer
void
WebrtcVideoConduit::DetachRenderer()
{
if(mRenderer)
{
mPtrViERender->StopRender(mChannel);
mRenderer = nullptr;
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mRenderer)
{
mRenderer = nullptr;
}
}
mPtrViERender->StopRender(mChannel);
}
MediaConduitErrorCode
WebrtcVideoConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport)
WebrtcVideoConduit::SetTransmitterTransport(mozilla::RefPtr<TransportInterface> aTransport)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
if(!aTransport)
{
CSFLogError(logTag, "%s NULL Transport", __FUNCTION__);
return kMediaConduitInvalidTransport;
}
ReentrantMonitorAutoEnter enter(mTransportMonitor);
// set the transport
mTransport = aTransport;
mTransmitterTransport = aTransport;
return kMediaConduitNoError;
}
/**
 * Install the transport used to send receiver-side RTCP (receiver reports)
 * out of this conduit. Replaces any previously set receiver transport.
 */
MediaConduitErrorCode
WebrtcVideoConduit::SetReceiverTransport(mozilla::RefPtr<TransportInterface> aTransport)
{
  CSFLogDebug(logTag, "%s ", __FUNCTION__);
  // Hold the transport monitor while swapping the pointer so webrtc.org
  // callback threads reading mReceiverTransport never observe a torn update.
  ReentrantMonitorAutoEnter enter(mTransportMonitor);
  // set the transport
  // NOTE(review): unlike SetTransmitterTransport, this accepts a null
  // transport — presumably to allow detaching; confirm that is intended.
  mReceiverTransport = aTransport;
  return kMediaConduitNoError;
}
@ -587,26 +565,11 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
if(CheckCodecsForMatch(mCurSendCodecConfig, codecConfig))
{
CSFLogDebug(logTag, "%s Codec has been applied already ", __FUNCTION__);
return kMediaConduitCodecInUse;
}
//transmitting already ?
if(mEngineTransmitting)
{
CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
if(mPtrViEBase->StopSend(mChannel) == -1)
{
CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitUnknownError;
}
mEngineTransmitting = false;
}
if (mLoadManager) {
mPtrViEBase->RegisterCpuOveruseObserver(mChannel, mLoadManager);
mPtrViEBase->SetLoadManager(mLoadManager);
condError = StopTransmitting();
if (condError != kMediaConduitNoError) {
return condError;
}
if (mExternalSendCodec &&
@ -687,11 +650,9 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
}
}
if(mPtrViEBase->StartSend(mChannel) == -1)
{
CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitUnknownError;
condError = StartTransmitting();
if (condError != kMediaConduitNoError) {
return condError;
}
//Copy the applied config for future reference.
@ -701,8 +662,6 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
mPtrRTP->SetRembStatus(mChannel, true, false);
// by now we should be successfully started the transmission
mEngineTransmitting = true;
return kMediaConduitNoError;
}
@ -712,32 +671,14 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
MediaConduitErrorCode condError = kMediaConduitNoError;
int error = 0; //webrtc engine errors
bool success = false;
std::string payloadName;
// are we receiving already? If so, stop receiving and playout
// since we can't apply new recv codec when the engine is playing.
if(mEngineReceiving)
{
CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
if(mPtrViEBase->StopReceive(mChannel) == -1)
{
error = mPtrViEBase->LastError();
if(error == kViEBaseUnknownError)
{
CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__);
mEngineReceiving = false;
} else {
CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitUnknownError;
}
}
condError = StopReceiving();
if (condError != kMediaConduitNoError) {
return condError;
}
mEngineReceiving = false;
if(codecConfigList.empty())
{
CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
@ -777,7 +718,6 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
webrtc::VideoCodec video_codec;
mEngineReceiving = false;
memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
if (mExternalRecvCodec &&
@ -907,19 +847,13 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
}
mUsingNackBasic = use_nack_basic;
//Start Receive on the video engine
if(mPtrViEBase->StartReceive(mChannel) == -1)
{
error = mPtrViEBase->LastError();
CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error);
return kMediaConduitUnknownError;
condError = StartReceiving();
if (condError != kMediaConduitNoError) {
return condError;
}
// by now we should be successfully started the reception
mPtrRTP->SetRembStatus(mChannel, false, true);
mEngineReceiving = true;
DumpCodecDB();
return kMediaConduitNoError;
}
@ -1217,52 +1151,125 @@ WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
return kMediaConduitNoError;
}
/**
 * Stop the transmit side of the video engine if it is currently running.
 * Idempotent: returns success immediately when nothing is transmitting.
 */
MediaConduitErrorCode
WebrtcVideoConduit::StopTransmitting()
{
  // Nothing to do unless the transmit subsystem is up.
  if (!mEngineTransmitting) {
    return kMediaConduitNoError;
  }

  CSFLogDebug(logTag, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
  if (mPtrViEBase->StopSend(mChannel) == -1) {
    CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__,
                mPtrViEBase->LastError());
    return kMediaConduitUnknownError;
  }

  mEngineTransmitting = false;
  return kMediaConduitNoError;
}
/**
 * Start the transmit side of the video engine if it is not already running.
 * Idempotent: returns success immediately when already transmitting.
 */
MediaConduitErrorCode
WebrtcVideoConduit::StartTransmitting()
{
  // Already sending — nothing to do.
  if (mEngineTransmitting) {
    return kMediaConduitNoError;
  }

  if (mPtrViEBase->StartSend(mChannel) == -1) {
    CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__,
                mPtrViEBase->LastError());
    return kMediaConduitUnknownError;
  }

  mEngineTransmitting = true;
  return kMediaConduitNoError;
}
/**
 * Stop the receive side of the video engine if it is currently running.
 * Called before applying a new receive codec (the engine cannot take a new
 * recv codec while playing) and on receive-pipeline shutdown.
 * Idempotent: returns success immediately when nothing is being received.
 */
MediaConduitErrorCode
WebrtcVideoConduit::StopReceiving()
{
  // Are we receiving already? If so, stop receiving and playout
  // since we can't apply new recv codec when the engine is playing.
  if(mEngineReceiving)
  {
    CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
    if(mPtrViEBase->StopReceive(mChannel) == -1)
    {
      int error = mPtrViEBase->LastError();
      if(error == kViEBaseUnknownError)
      {
        // kViEBaseUnknownError is deliberately treated as a successful stop
        // here — presumably the channel was not actually receiving; confirm
        // against the ViEBase error-code documentation.
        CSFLogDebug(logTag, "%s StopReceive() Success ", __FUNCTION__);
      } else {
        CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__,
                    mPtrViEBase->LastError());
        return kMediaConduitUnknownError;
      }
    }
    mEngineReceiving = false;
  }
  return kMediaConduitNoError;
}
/**
 * Start the receive side of the video engine if it is not already running.
 * Idempotent: returns success immediately when already receiving.
 */
MediaConduitErrorCode
WebrtcVideoConduit::StartReceiving()
{
  if (!mEngineReceiving) {
    CSFLogDebug(logTag, "%s Attemping to start... ", __FUNCTION__);
    //Start Receive on the video engine
    if(mPtrViEBase->StartReceive(mChannel) == -1)
    {
      int error = mPtrViEBase->LastError();
      CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, error);
      return kMediaConduitUnknownError;
    }
    // Mark the receive subsystem up only after the engine call succeeds.
    mEngineReceiving = true;
  }
  return kMediaConduitNoError;
}
//WebRTC::RTP Callback Implementation
// Called on MSG thread
int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len)
{
CSFLogDebug(logTag, "%s : channel %d len %d %s", __FUNCTION__, channel, len,
(mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");
CSFLogDebug(logTag, "%s : channel %d len %d", __FUNCTION__, channel, len);
if (mEngineReceiving)
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mTransmitterTransport &&
(mTransmitterTransport->SendRtpPacket(data, len) == NS_OK))
{
if (mOtherDirection)
{
return mOtherDirection->SendPacket(channel, data, len);
}
CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d",
__FUNCTION__, channel);
return -1;
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
return len;
} else {
if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
return -1;
}
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
return -1;
}
}
// Called from multiple threads including webrtc Process thread
int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len)
{
CSFLogDebug(logTag, "%s : channel %d , len %d ", __FUNCTION__, channel,len);
if (mEngineTransmitting)
{
if (mOtherDirection)
{
return mOtherDirection->SendRTCPPacket(channel, data, len);
}
}
// We come here if we have only one pipeline/conduit setup,
// such as for unidirectional streams.
// We also end up here if we are receiving
if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mReceiverTransport &&
mReceiverTransport->SendRtcpPacket(data, len) == NS_OK)
{
// Might be a sender report, might be a receiver report, we don't know.
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
return len;
} else if(mTransmitterTransport &&
(mTransmitterTransport->SendRtpPacket(data, len) == NS_OK)) {
CSFLogDebug(logTag, "%s Sent RTCP Packet (sender report) ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
return -1;
@ -1278,8 +1285,10 @@ WebrtcVideoConduit::FrameSizeChange(unsigned int width,
CSFLogDebug(logTag, "%s ", __FUNCTION__);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
mReceivingWidth = width;
mReceivingHeight = height;
mNumReceivingStreams = numStreams;
if(mRenderer)
{
@ -1301,6 +1310,7 @@ WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
{
CSFLogDebug(logTag, "%s Buffer Size %d", __FUNCTION__, buffer_size);
ReentrantMonitorAutoEnter enter(mTransportMonitor);
if(mRenderer)
{
layers::Image* img = nullptr;
@ -1479,7 +1489,6 @@ WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo,
if(codecAppliedAlready)
{
CSFLogDebug(logTag, "%s Codec %s Already Applied ", __FUNCTION__, codecInfo->mName.c_str());
return kMediaConduitCodecInUse;
}
return kMediaConduitNoError;
}

Просмотреть файл

@ -94,6 +94,11 @@ public:
*/
virtual MediaConduitErrorCode ReceivedRTCPPacket(const void *data, int len) MOZ_OVERRIDE;
virtual MediaConduitErrorCode StopTransmitting() MOZ_OVERRIDE;
virtual MediaConduitErrorCode StartTransmitting() MOZ_OVERRIDE;
virtual MediaConduitErrorCode StopReceiving() MOZ_OVERRIDE;
virtual MediaConduitErrorCode StartReceiving() MOZ_OVERRIDE;
/**
* Function to configure send codec for the video session
* @param sendSessionConfig: CodecConfiguration
@ -120,7 +125,9 @@ public:
* Register Transport for this Conduit. RTP and RTCP frames from the VideoEngine
* shall be passed to the registered transport for transporting externally.
*/
virtual MediaConduitErrorCode AttachTransport(mozilla::RefPtr<TransportInterface> aTransport) MOZ_OVERRIDE;
virtual MediaConduitErrorCode SetTransmitterTransport(mozilla::RefPtr<TransportInterface> aTransport) MOZ_OVERRIDE;
virtual MediaConduitErrorCode SetReceiverTransport(mozilla::RefPtr<TransportInterface> aTransport) MOZ_OVERRIDE;
/**
* Function to select and change the encoding resolution based on incoming frame size
@ -233,8 +240,7 @@ public:
WebrtcVideoConduit();
virtual ~WebrtcVideoConduit();
MediaConduitErrorCode Init(WebrtcVideoConduit *other,
bool receiving);
MediaConduitErrorCode Init();
int GetChannel() { return mChannel; }
webrtc::VideoEngine* GetVideoEngine() { return mVideoEngine; }
@ -297,17 +303,10 @@ private:
// Video Latency Test averaging filter
void VideoLatencyUpdate(uint64_t new_sample);
// The two sides of a send/receive pair of conduits each keep a pointer to the other.
// They also share a single VideoEngine and mChannel. Shutdown must be coordinated
// carefully to avoid double-freeing or accessing after one frees.
WebrtcVideoConduit* mOtherDirection;
// The other side has shut down our mChannel and related items already
bool mShutDown;
// A few of these are shared by both directions. They're released by the last
// conduit to die.
webrtc::VideoEngine* mVideoEngine; // shared
mozilla::RefPtr<TransportInterface> mTransport;
webrtc::VideoEngine* mVideoEngine;
mozilla::ReentrantMonitor mTransportMonitor;
mozilla::RefPtr<TransportInterface> mTransmitterTransport;
mozilla::RefPtr<TransportInterface> mReceiverTransport;
mozilla::RefPtr<VideoRenderer> mRenderer;
ScopedCustomReleasePtr<webrtc::ViEBase> mPtrViEBase;
@ -318,11 +317,11 @@ private:
ScopedCustomReleasePtr<webrtc::ViERTP_RTCP> mPtrRTP;
ScopedCustomReleasePtr<webrtc::ViEExternalCodec> mPtrExtCodec;
webrtc::ViEExternalCapture* mPtrExtCapture; // shared
webrtc::ViEExternalCapture* mPtrExtCapture;
// Engine state we are concerned with.
bool mEngineTransmitting; //If true ==> Transmit Sub-system is up and running
bool mEngineReceiving; // if true ==> Receive Sus-sysmtem up and running
mozilla::Atomic<bool> mEngineTransmitting; //If true ==> Transmit Sub-system is up and running
mozilla::Atomic<bool> mEngineReceiving; // if true ==> Receive Sus-sysmtem up and running
int mChannel; // Video Channel for this conduit
int mCapId; // Capturer for this conduit
@ -333,6 +332,7 @@ private:
unsigned short mReceivingWidth;
unsigned short mReceivingHeight;
unsigned int mSendingFramerate;
unsigned short mNumReceivingStreams;
bool mVideoLatencyTestEnable;
uint64_t mVideoLatencyAvg;
uint32_t mMinBitrate;

Просмотреть файл

@ -65,6 +65,12 @@ MediaPipeline::~MediaPipeline() {
nsresult MediaPipeline::Init() {
ASSERT_ON_THREAD(main_thread_);
if (direction_ == RECEIVE) {
conduit_->SetReceiverTransport(transport_);
} else {
conduit_->SetTransmitterTransport(transport_);
}
RUN_ON_THREAD(sts_thread_,
WrapRunnable(
nsRefPtr<MediaPipeline>(this),
@ -76,7 +82,6 @@ nsresult MediaPipeline::Init() {
nsresult MediaPipeline::Init_s() {
ASSERT_ON_THREAD(sts_thread_);
conduit_->AttachTransport(transport_);
return AttachTransport_s();
}
@ -88,6 +93,7 @@ nsresult MediaPipeline::Init_s() {
void MediaPipeline::ShutdownTransport_s() {
ASSERT_ON_THREAD(sts_thread_);
MOZ_ASSERT(!stream_); // verifies that ShutdownMedia_m() has run
DetachTransport_s();
}
@ -1341,7 +1347,6 @@ NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
track_rate_,
0, // TODO(ekr@rtfm.com): better estimate of "capture" (really playout) delay
samples_length);
MOZ_ASSERT(samples_length < AUDIO_SAMPLE_BUFFER_MAX);
if (err != kMediaConduitNoError) {
// Insert silence on conduit/GIPS failure (extremely unlikely)
@ -1349,11 +1354,12 @@ NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) {
<< ") to return data @ " << played_ticks_
<< " (desired " << desired_time << " -> "
<< source_->StreamTimeToSeconds(desired_time) << ")");
MOZ_ASSERT(err == kMediaConduitNoError);
samples_length = (track_rate_/100)*sizeof(uint16_t); // if this is not enough we'll loop and provide more
memset(samples_data, '\0', samples_length);
}
MOZ_ASSERT(samples_length < AUDIO_SAMPLE_BUFFER_MAX);
MOZ_MTLOG(ML_DEBUG, "Audio conduit returned buffer of length "
<< samples_length);

Просмотреть файл

@ -110,7 +110,7 @@ class MediaPipeline : public sigslot::has_slots<> {
// PipelineTransport() will access this->sts_thread_; moved here for safety
transport_ = new PipelineTransport(this);
}
}
// Must be called on the STS thread. Must be called after ShutdownMedia_m().
void ShutdownTransport_s();
@ -119,6 +119,12 @@ class MediaPipeline : public sigslot::has_slots<> {
void ShutdownMedia_m() {
ASSERT_ON_THREAD(main_thread_);
if (direction_ == RECEIVE) {
conduit_->StopReceiving();
} else {
conduit_->StopTransmitting();
}
if (stream_) {
DetachMediaStream();
}

Просмотреть файл

@ -171,7 +171,6 @@ bool MediaPipelineFilter::CheckRtcpReport(const unsigned char* data,
if (ssrcs_must_match && ssrcs_must_not_match) {
MOZ_MTLOG(ML_ERROR, "Received an RTCP packet with SSRCs from "
"multiple m-lines! This is broken.");
return false;
}
// This is set if any ssrc matched

Просмотреть файл

@ -353,6 +353,19 @@ MediaPipelineFactory::CreateOrUpdateMediaPipeline(
RefPtr<MediaPipeline> pipeline =
stream->GetPipelineByTrackId_m(aTrack.GetTrackId());
if (pipeline && pipeline->level() != static_cast<int>(level)) {
MOZ_MTLOG(ML_WARNING, "Track " << aTrack.GetTrackId() <<
" has moved from level " << pipeline->level() <<
" to level " << level <<
". This requires re-creating the MediaPipeline.");
// Since we do not support changing the conduit on a pre-existing
// MediaPipeline
pipeline = nullptr;
stream->RemoveTrack(aTrack.GetTrackId());
stream->AddTrack(aTrack.GetTrackId());
}
if (pipeline) {
pipeline->UpdateTransport_m(level, rtpFlow, rtcpFlow, filter);
return NS_OK;
@ -366,11 +379,11 @@ MediaPipelineFactory::CreateOrUpdateMediaPipeline(
RefPtr<MediaSessionConduit> conduit;
if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
rv = CreateAudioConduit(aTrackPair, aTrack, &conduit);
rv = GetOrCreateAudioConduit(aTrackPair, aTrack, &conduit);
if (NS_FAILED(rv))
return rv;
} else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
rv = CreateVideoConduit(aTrackPair, aTrack, &conduit);
rv = GetOrCreateVideoConduit(aTrackPair, aTrack, &conduit);
if (NS_FAILED(rv))
return rv;
} else {
@ -527,9 +540,10 @@ MediaPipelineFactory::CreateMediaPipelineSending(
}
nsresult
MediaPipelineFactory::CreateAudioConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp)
MediaPipelineFactory::GetOrCreateAudioConduit(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp)
{
if (!aTrack.GetNegotiatedDetails()) {
@ -540,18 +554,18 @@ MediaPipelineFactory::CreateAudioConduit(const JsepTrackPair& aTrackPair,
bool receiving =
aTrack.GetDirection() == JsepTrack::Direction::kJsepTrackReceiving;
RefPtr<AudioSessionConduit> conduit = AudioSessionConduit::Create(
static_cast<AudioSessionConduit*>(nullptr));
RefPtr<AudioSessionConduit> conduit =
mPCMedia->GetAudioConduit(aTrackPair.mLevel);
if (!conduit) {
MOZ_MTLOG(ML_ERROR, "Could not create audio conduit");
return NS_ERROR_FAILURE;
}
conduit = AudioSessionConduit::Create();
if (!conduit) {
MOZ_MTLOG(ML_ERROR, "Could not create audio conduit");
return NS_ERROR_FAILURE;
}
mPCMedia->AddConduit(aTrack.GetStreamId(),
aTrack.GetTrackId(),
receiving,
conduit);
mPCMedia->AddAudioConduit(aTrackPair.mLevel, conduit);
}
size_t numCodecs = aTrack.GetNegotiatedDetails()->GetCodecCount();
if (numCodecs == 0) {
@ -641,9 +655,10 @@ MediaPipelineFactory::CreateAudioConduit(const JsepTrackPair& aTrackPair,
}
nsresult
MediaPipelineFactory::CreateVideoConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp)
MediaPipelineFactory::GetOrCreateVideoConduit(
const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp)
{
if (!aTrack.GetNegotiatedDetails()) {
@ -654,18 +669,18 @@ MediaPipelineFactory::CreateVideoConduit(const JsepTrackPair& aTrackPair,
bool receiving =
aTrack.GetDirection() == JsepTrack::Direction::kJsepTrackReceiving;
RefPtr<VideoSessionConduit> conduit = VideoSessionConduit::Create(
static_cast<VideoSessionConduit*>(nullptr), receiving);
RefPtr<VideoSessionConduit> conduit =
mPCMedia->GetVideoConduit(aTrackPair.mLevel);
if (!conduit) {
MOZ_MTLOG(ML_ERROR, "Could not create video conduit");
return NS_ERROR_FAILURE;
}
conduit = VideoSessionConduit::Create();
if (!conduit) {
MOZ_MTLOG(ML_ERROR, "Could not create audio conduit");
return NS_ERROR_FAILURE;
}
mPCMedia->AddConduit(aTrack.GetStreamId(),
aTrack.GetTrackId(),
receiving,
conduit);
mPCMedia->AddVideoConduit(aTrackPair.mLevel, conduit);
}
size_t numCodecs = aTrack.GetNegotiatedDetails()->GetCodecCount();
if (numCodecs == 0) {

Просмотреть файл

@ -44,13 +44,13 @@ private:
nsAutoPtr<MediaPipelineFilter> filter,
const RefPtr<MediaSessionConduit>& aConduit);
nsresult CreateAudioConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp);
nsresult GetOrCreateAudioConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp);
nsresult CreateVideoConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp);
nsresult GetOrCreateVideoConduit(const JsepTrackPair& aTrackPair,
const JsepTrack& aTrack,
RefPtr<MediaSessionConduit>* aConduitp);
MediaConduitErrorCode EnsureExternalCodec(VideoSessionConduit& aConduit,
VideoCodecConfig* aConfig,

Просмотреть файл

@ -86,8 +86,6 @@ void
SourceStreamInfo::RemoveTrack(const std::string& trackId)
{
mTracks.erase(trackId);
// Pipelines are already holding onto a ref to these.
mConduits.erase(trackId);
RefPtr<MediaPipeline> pipeline = GetPipelineByTrackId_m(trackId);
if (pipeline) {
mPipelines.erase(trackId);
@ -1134,19 +1132,6 @@ SourceStreamInfo::StorePipeline(
return NS_OK;
}
nsresult
SourceStreamInfo::StoreConduit(const std::string& trackId,
RefPtr<MediaSessionConduit> aConduit)
{
MOZ_ASSERT(mConduits.find(trackId) == mConduits.end());
if (mConduits.find(trackId) != mConduits.end()) {
CSFLogError(logTag, "%s: Storing duplicate track", __FUNCTION__);
return NS_ERROR_FAILURE;
}
mConduits[trackId] = aConduit;
return NS_OK;
}
void
RemoteSourceStreamInfo::SyncPipeline(
RefPtr<MediaPipelineReceive> aPipeline)
@ -1193,22 +1178,4 @@ RefPtr<MediaPipeline> SourceStreamInfo::GetPipelineByTrackId_m(
return nullptr;
}
RefPtr<MediaSessionConduit> SourceStreamInfo::GetConduitByTrackId_m(
const std::string& trackId) {
ASSERT_ON_THREAD(mParent->GetMainThread());
// Refuse to hand out references if we're tearing down.
// (Since teardown involves a dispatch to and from STS before MediaConduits
// are released, it is safe to start other dispatches to and from STS with a
// RefPtr<MediaConduit>, since that reference won't be the last one
// standing)
if (mMediaStream) {
if (mConduits.count(trackId)) {
return mConduits[trackId];
}
}
return nullptr;
}
} // namespace mozilla

Просмотреть файл

@ -86,9 +86,6 @@ public:
nsresult StorePipeline(const std::string& trackId,
const RefPtr<MediaPipeline>& aPipeline);
nsresult StoreConduit(const std::string& trackId,
RefPtr<MediaSessionConduit> aConduit);
virtual void AddTrack(const std::string& trackId) { mTracks.insert(trackId); }
void RemoveTrack(const std::string& trackId);
bool HasTrack(const std::string& trackId) const
@ -102,7 +99,6 @@ public:
const std::map<std::string, RefPtr<MediaPipeline>>&
GetPipelines() const { return mPipelines; }
RefPtr<MediaPipeline> GetPipelineByTrackId_m(const std::string& trackId);
RefPtr<MediaSessionConduit> GetConduitByTrackId_m(const std::string& trackId);
const std::string& GetId() const { return mId; }
void DetachTransport_s();
@ -116,7 +112,6 @@ protected:
// and conduits are set up once offer/answer completes.
std::set<std::string> mTracks;
std::map<std::string, RefPtr<MediaPipeline>> mPipelines;
std::map<std::string, RefPtr<MediaSessionConduit>> mConduits;
};
// TODO(ekr@rtfm.com): Refactor {Local,Remote}SourceStreamInfo
@ -324,42 +319,43 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
static void DtlsConnected_m(const std::string& aParentHandle,
bool aPrivacyRequested);
RefPtr<MediaSessionConduit> GetConduit(const std::string& streamId,
const std::string& trackId,
bool aReceive) {
SourceStreamInfo* info;
if (aReceive) {
info = GetRemoteStreamById(streamId);
} else {
info = GetLocalStreamById(streamId);
}
if (!info) {
MOZ_ASSERT(false);
RefPtr<AudioSessionConduit> GetAudioConduit(size_t level) {
auto it = mConduits.find(level);
if (it == mConduits.end()) {
return nullptr;
}
return info->GetConduitByTrackId_m(trackId);
if (it->second.first) {
MOZ_ASSERT(false, "In GetAudioConduit, we found a video conduit!");
return nullptr;
}
return RefPtr<AudioSessionConduit>(
static_cast<AudioSessionConduit*>(it->second.second.get()));
}
  // Look up the video conduit stored for the given level (the m-line index
  // used as the key of mConduits), or nullptr if none exists. The bool in
  // each mConduits entry is true for video conduits and false for audio;
  // finding an audio entry here indicates a caller bug, so assert.
  RefPtr<VideoSessionConduit> GetVideoConduit(size_t level) {
    auto it = mConduits.find(level);
    if (it == mConduits.end()) {
      return nullptr;
    }
    if (!it->second.first) {
      MOZ_ASSERT(false, "In GetVideoConduit, we found an audio conduit!");
      return nullptr;
    }
    // The flag guarantees the stored MediaSessionConduit is a video conduit,
    // so the static_cast downcast is safe.
    return RefPtr<VideoSessionConduit>(
        static_cast<VideoSessionConduit*>(it->second.second.get()));
  }
// Add a conduit
void AddConduit(const std::string& streamId,
const std::string& trackId,
bool aReceive,
const RefPtr<MediaSessionConduit> &aConduit) {
SourceStreamInfo* info;
if (aReceive) {
info = GetRemoteStreamById(streamId);
} else {
info = GetLocalStreamById(streamId);
}
  // Store an audio conduit under |level|, replacing any previous entry.
  // The 'false' flag marks the entry as audio (checked by GetAudioConduit).
  void AddAudioConduit(size_t level, const RefPtr<AudioSessionConduit> &aConduit) {
    mConduits[level] = std::make_pair(false, aConduit);
  }
if (!info) {
MOZ_ASSERT(false);
return;
}
info->StoreConduit(trackId, aConduit);
  // Store a video conduit under |level|, replacing any previous entry.
  // The 'true' flag marks the entry as video (checked by GetVideoConduit).
  void AddVideoConduit(size_t level, const RefPtr<VideoSessionConduit> &aConduit) {
    mConduits[level] = std::make_pair(true, aConduit);
  }
// ICE state signals
@ -455,6 +451,8 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
// This is only accessed on the main thread (with one special exception)
nsTArray<nsRefPtr<RemoteSourceStreamInfo> > mRemoteSourceStreams;
std::map<size_t, std::pair<bool, RefPtr<MediaSessionConduit>>> mConduits;
// Allow loopback for ICE.
bool mAllowIceLoopback;

Просмотреть файл

@ -528,7 +528,6 @@ class TransportConduitTest : public ::testing::Test
mozilla::SyncRunnable::DispatchToThread(gMainThread,
WrapRunnableNMRet(
&mozilla::AudioSessionConduit::Create,
nullptr,
&mAudioSession));
if( !mAudioSession )
ASSERT_NE(mAudioSession, (void*)nullptr);
@ -536,7 +535,6 @@ class TransportConduitTest : public ::testing::Test
mozilla::SyncRunnable::DispatchToThread(gMainThread,
WrapRunnableNMRet(
&mozilla::AudioSessionConduit::Create,
nullptr,
&mAudioSession2));
if( !mAudioSession2 )
ASSERT_NE(mAudioSession2, (void*)nullptr);
@ -547,9 +545,9 @@ class TransportConduitTest : public ::testing::Test
mAudioTransport = xport;
// attach the transport to audio-conduit
err = mAudioSession->AttachTransport(mAudioTransport);
err = mAudioSession->SetTransmitterTransport(mAudioTransport);
ASSERT_EQ(mozilla::kMediaConduitNoError, err);
err = mAudioSession2->AttachTransport(mAudioTransport);
err = mAudioSession2->SetReceiverTransport(mAudioTransport);
ASSERT_EQ(mozilla::kMediaConduitNoError, err);
//configure send and recv codecs on the audio-conduit
@ -594,8 +592,6 @@ class TransportConduitTest : public ::testing::Test
mozilla::SyncRunnable::DispatchToThread(gMainThread,
WrapRunnableNMRet(
&mozilla::VideoSessionConduit::Create,
nullptr,
false,
&mVideoSession));
if( !mVideoSession )
ASSERT_NE(mVideoSession, (void*)nullptr);
@ -604,8 +600,6 @@ class TransportConduitTest : public ::testing::Test
mozilla::SyncRunnable::DispatchToThread(gMainThread,
WrapRunnableNMRet(
&mozilla::VideoSessionConduit::Create,
nullptr,
true,
&mVideoSession2));
if( !mVideoSession2 )
ASSERT_NE(mVideoSession2,(void*)nullptr);
@ -625,9 +619,9 @@ class TransportConduitTest : public ::testing::Test
// attach the transport and renderer to video-conduit
err = mVideoSession2->AttachRenderer(mVideoRenderer);
ASSERT_EQ(mozilla::kMediaConduitNoError, err);
err = mVideoSession->AttachTransport(mVideoTransport);
err = mVideoSession->SetTransmitterTransport(mVideoTransport);
ASSERT_EQ(mozilla::kMediaConduitNoError, err);
err = mVideoSession2->AttachTransport(mVideoTransport);
err = mVideoSession2->SetReceiverTransport(mVideoTransport);
ASSERT_EQ(mozilla::kMediaConduitNoError, err);
//configure send and recv codecs on theconduit
@ -696,8 +690,6 @@ class TransportConduitTest : public ::testing::Test
mozilla::SyncRunnable::DispatchToThread(gMainThread,
WrapRunnableNMRet(
&mozilla::VideoSessionConduit::Create,
nullptr,
true,
&videoSession));
if( !videoSession )
ASSERT_NE(videoSession, (void*)nullptr);
@ -809,8 +801,6 @@ class TransportConduitTest : public ::testing::Test
mozilla::SyncRunnable::DispatchToThread(gMainThread,
WrapRunnableNMRet(
&mozilla::VideoSessionConduit::Create,
nullptr,
false,
&mVideoSession));
if( !mVideoSession )
ASSERT_NE(mVideoSession, (void*)nullptr);

Просмотреть файл

@ -132,7 +132,7 @@ class TestAgent {
public:
TestAgent() :
audio_config_(109, "opus", 48000, 960, 2, 64000),
audio_conduit_(mozilla::AudioSessionConduit::Create(nullptr)),
audio_conduit_(mozilla::AudioSessionConduit::Create()),
audio_(),
audio_pipeline_() {
}
@ -623,14 +623,14 @@ TEST_F(MediaPipelineFilterTest, TestFilterReport1SSRCTruncated) {
SSRC(16),
0,0,0
};
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rr, sizeof(rr)));
const unsigned char sr[] = {
RTCP_TYPEINFO(1, MediaPipelineFilter::RECEIVER_REPORT_T, 12),
REPORT_FRAGMENT(16),
0,0,0
};
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(sr, sizeof(rr)));
}
@ -670,10 +670,18 @@ TEST_F(MediaPipelineFilterTest, TestFilterReport1Inconsistent) {
// So, when RTCP shows up with a remote SSRC that matches, and a local
// ssrc that doesn't, we assume the other end has messed up and put ssrcs
// from more than one m-line in the packet.
ASSERT_EQ(MediaPipelineFilter::FAIL,
// TODO: Currently, the webrtc.org code will continue putting old ssrcs
// in RTCP that have been negotiated away, causing the RTCP to be dropped
// if we leave this checking in. Not sure how we're supposed to prompt
// the webrtc.org code to stop doing this.
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rtcp_sr_s16_r17, sizeof(rtcp_sr_s16_r17)));
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rtcp_rr_s16_r17, sizeof(rtcp_rr_s16_r17)));
//ASSERT_EQ(MediaPipelineFilter::FAIL,
// filter.FilterRTCP(rtcp_sr_s16_r17, sizeof(rtcp_sr_s16_r17)));
//ASSERT_EQ(MediaPipelineFilter::FAIL,
// filter.FilterRTCP(rtcp_rr_s16_r17, sizeof(rtcp_rr_s16_r17)));
}
TEST_F(MediaPipelineFilterTest, TestFilterReport1NeitherMatch) {
@ -712,23 +720,35 @@ TEST_F(MediaPipelineFilterTest, TestFilterReport2Inconsistent101) {
MediaPipelineFilter filter;
filter.AddRemoteSSRC(16);
filter.AddLocalSSRC(18);
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rtcp_sr_s16_r17_18,
sizeof(rtcp_sr_s16_r17_18)));
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rtcp_rr_s16_r17_18,
sizeof(rtcp_rr_s16_r17_18)));
//ASSERT_EQ(MediaPipelineFilter::FAIL,
// filter.FilterRTCP(rtcp_sr_s16_r17_18,
// sizeof(rtcp_sr_s16_r17_18)));
//ASSERT_EQ(MediaPipelineFilter::FAIL,
// filter.FilterRTCP(rtcp_rr_s16_r17_18,
// sizeof(rtcp_rr_s16_r17_18)));
}
TEST_F(MediaPipelineFilterTest, TestFilterReport2Inconsistent001) {
MediaPipelineFilter filter;
filter.AddLocalSSRC(18);
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rtcp_sr_s16_r17_18,
sizeof(rtcp_sr_s16_r17_18)));
ASSERT_EQ(MediaPipelineFilter::FAIL,
ASSERT_EQ(MediaPipelineFilter::PASS,
filter.FilterRTCP(rtcp_rr_s16_r17_18,
sizeof(rtcp_rr_s16_r17_18)));
//ASSERT_EQ(MediaPipelineFilter::FAIL,
// filter.FilterRTCP(rtcp_sr_s16_r17_18,
// sizeof(rtcp_sr_s16_r17_18)));
//ASSERT_EQ(MediaPipelineFilter::FAIL,
// filter.FilterRTCP(rtcp_rr_s16_r17_18,
// sizeof(rtcp_rr_s16_r17_18)));
}
TEST_F(MediaPipelineFilterTest, TestFilterUnknownRTCPType) {

Просмотреть файл

@ -1424,7 +1424,7 @@ class SignalingAgent {
// Check feedback method for video
if (flags & PIPELINE_VIDEO) {
if ((flags & PIPELINE_VIDEO) && !(flags & PIPELINE_SEND)) {
mozilla::MediaSessionConduit *conduit = pipeline->Conduit();
ASSERT_TRUE(conduit);
ASSERT_EQ(conduit->type(), mozilla::MediaSessionConduit::VIDEO);
@ -2777,9 +2777,9 @@ TEST_P(SignalingTest, RenegotiationOffererSwapsMsids)
WaitForCompleted();
// Wait for some more data to get received
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(1) >= 40, kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a2_->GetPacketsReceived(1) >= 80, kDefaultTimeout * 2);
// Not really packets, but audio segments, happens later
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(1) >= 40, kDefaultTimeout * 2);
ASSERT_TRUE_WAIT(a1_->GetPacketsSent(1) >= 80, kDefaultTimeout * 2);
CloseStreams();