Bug 864654: merge backend for send and receive VideoConduits to match AudioConduits & cleanup r=ekr

Randell Jesup 2013-10-23 06:20:54 -04:00
Parent afa7285335
Commit 19547716aa
6 changed files with 299 additions and 192 deletions

View file

@ -151,10 +151,10 @@ class VideoSessionConduit : public MediaSessionConduit
public:
/**
* Factory function to create and initialize a Video Conduit Session
* return: Concrete VideoSessionConduitObject or NULL in the case
* return: Concrete VideoSessionConduitObject or nullptr in the case
* of failure
*/
static RefPtr<VideoSessionConduit> Create();
static RefPtr<VideoSessionConduit> Create(VideoSessionConduit *aOther);
enum FrameRequestType
{
@ -260,8 +260,8 @@ class AudioSessionConduit : public MediaSessionConduit
public:
/**
* Factory function to create and initialize a Video Conduit Session
* return: Concrete VideoSessionConduitObject or NULL in the case
* Factory function to create and initialize an Audio Conduit Session
* return: Concrete AudioSessionConduitObject or nullptr in the case
* of failure
*/
static mozilla::RefPtr<AudioSessionConduit> Create(AudioSessionConduit *aOther);
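Both factories now follow the same pairing shape: the first conduit of a send/receive pair is created with a null peer, and the second is created with a pointer to the first so the two share one underlying engine and channel. A minimal sketch of the call pattern, with purely illustrative caller code (the variable names are assumptions, not part of this patch):
// Sketch only: assumes main-thread execution, as the factories assert.
mozilla::RefPtr<mozilla::VideoSessionConduit> tx =
    mozilla::VideoSessionConduit::Create(nullptr);   // first of the pair
mozilla::RefPtr<mozilla::VideoSessionConduit> rx =
    mozilla::VideoSessionConduit::Create(tx.get());  // shares tx's engine and channel
// Either call returns nullptr on failure, per the comments above.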

View file

@ -10,6 +10,8 @@
#include "VideoConduit.h"
#include "AudioConduit.h"
#include "nsThreadUtils.h"
#include "webrtc/video_engine/include/vie_errors.h"
#ifdef MOZ_WIDGET_ANDROID
@ -23,15 +25,22 @@ namespace mozilla {
static const char* logTag ="WebrtcVideoSessionConduit";
// 32 bytes is what WebRTC CodecInst expects
const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
//Factory Implementation
mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create()
/**
* Factory Method for VideoConduit
*/
mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create(VideoSessionConduit *aOther)
{
#ifdef MOZILLA_INTERNAL_API
// unit tests create their own "main thread"
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
#endif
CSFLogDebug(logTag, "%s ", __FUNCTION__);
WebrtcVideoConduit* obj = new WebrtcVideoConduit();
if(obj->Init() != kMediaConduitNoError)
if(obj->Init(static_cast<WebrtcVideoConduit*>(aOther)) != kMediaConduitNoError)
{
CSFLogError(logTag, "%s VideoConduit Init Failed ", __FUNCTION__);
delete obj;
@ -43,6 +52,10 @@ mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create()
WebrtcVideoConduit::~WebrtcVideoConduit()
{
#ifdef MOZILLA_INTERNAL_API
// unit tests create their own "main thread"
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
#endif
CSFLogDebug(logTag, "%s ", __FUNCTION__);
for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
@ -52,29 +65,38 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
delete mCurSendCodecConfig;
// The first one of a pair to be deleted shuts down media for both
//Deal with External Capturer
if(mPtrViECapture)
{
mPtrViECapture->DisconnectCaptureDevice(mCapId);
mPtrViECapture->ReleaseCaptureDevice(mCapId);
mPtrExtCapture = nullptr;
if (!mShutDown) {
mPtrViECapture->DisconnectCaptureDevice(mCapId);
mPtrViECapture->ReleaseCaptureDevice(mCapId);
mPtrExtCapture = nullptr;
if (mOtherDirection)
mOtherDirection->mPtrExtCapture = nullptr;
}
mPtrViECapture->Release();
}
//Deal with External Renderer
if(mPtrViERender)
{
if(mRenderer) {
mPtrViERender->StopRender(mChannel);
if (!mShutDown) {
if(mRenderer) {
mPtrViERender->StopRender(mChannel);
}
mPtrViERender->RemoveRenderer(mChannel);
}
mPtrViERender->RemoveRenderer(mChannel);
mPtrViERender->Release();
}
//Deal with the transport
if(mPtrViENetwork)
{
mPtrViENetwork->DeregisterSendTransport(mChannel);
if (!mShutDown) {
mPtrViENetwork->DeregisterSendTransport(mChannel);
}
mPtrViENetwork->Release();
}
@ -85,10 +107,12 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
if(mPtrViEBase)
{
mPtrViEBase->StopSend(mChannel);
mPtrViEBase->StopReceive(mChannel);
SyncTo(nullptr);
mPtrViEBase->DeleteChannel(mChannel);
if (!mShutDown) {
mPtrViEBase->StopSend(mChannel);
mPtrViEBase->StopReceive(mChannel);
SyncTo(nullptr);
mPtrViEBase->DeleteChannel(mChannel);
}
mPtrViEBase->Release();
}
@ -96,51 +120,73 @@ WebrtcVideoConduit::~WebrtcVideoConduit()
{
mPtrRTP->Release();
}
if(mVideoEngine)
if (mOtherDirection)
{
webrtc::VideoEngine::Delete(mVideoEngine);
// mOtherDirection owns these now!
mOtherDirection->mOtherDirection = NULL;
// let the other side know we terminated the channel
mOtherDirection->mShutDown = true;
mVideoEngine = nullptr;
} else {
// only one opener can call Delete. Have it be the last to close.
if(mVideoEngine)
{
webrtc::VideoEngine::Delete(mVideoEngine);
}
}
}
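The destructor above encodes a small idiom that is easier to see outside the interleaved diff: whichever conduit of a pair dies first tears down the shared channel state and flags its partner, and the partner, now without an mOtherDirection, is the one allowed to delete the shared engine. A stand-alone sketch of that idiom (all names here are invented for illustration, not the real classes):
// Minimal sketch of the "paired owners, last one out deletes" pattern.
struct SharedEngine { /* stands in for webrtc::VideoEngine */ };
struct PairedConduit {
  PairedConduit* mOther = nullptr;   // the other direction, if paired
  bool mShutDown = false;            // true once the partner tore things down
  SharedEngine* mEngine = nullptr;   // shared with mOther
  ~PairedConduit() {
    if (!mShutDown) {
      // First of the pair to die: release per-channel resources here
      // (capture device, renderer, transport registration, the channel itself).
    }
    if (mOther) {
      mOther->mOther = nullptr;      // mOther owns the shared state now
      mOther->mShutDown = true;      // tell it the channel is already gone
    } else {
      delete mEngine;                // last owner deletes the shared engine
    }
  }
};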
/**
* Performs initialization of the MANDATORY components of the Video Engine
*/
MediaConduitErrorCode WebrtcVideoConduit::Init()
MediaConduitErrorCode WebrtcVideoConduit::Init(WebrtcVideoConduit *other)
{
CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other);
CSFLogDebug(logTag, "%s ", __FUNCTION__);
if (other) {
MOZ_ASSERT(!other->mOtherDirection);
other->mOtherDirection = this;
mOtherDirection = other;
// only one can call ::Create()/GetVideoEngine()
MOZ_ASSERT(other->mVideoEngine);
mVideoEngine = other->mVideoEngine;
} else {
#ifdef MOZ_WIDGET_ANDROID
jobject context = jsjni_GetGlobalContextRef();
jobject context = jsjni_GetGlobalContextRef();
// get the JVM
JavaVM *jvm = jsjni_GetVM();
// get the JVM
JavaVM *jvm = jsjni_GetVM();
if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
CSFLogError(logTag, "%s: could not set Android objects", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
#endif
if( !(mVideoEngine = webrtc::VideoEngine::Create()) )
{
CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
PRLogModuleInfo *logs = GetWebRTCLogInfo();
if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
// no need for a critical section or lock here
gWebrtcTraceLoggingOn = 1;
const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
if (!file) {
file = "WebRTC.log";
// Per WebRTC APIs, the function calls below return NULL on failure
if( !(mVideoEngine = webrtc::VideoEngine::Create()) )
{
CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
PRLogModuleInfo *logs = GetWebRTCLogInfo();
if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
// no need for a critical section or lock here
gWebrtcTraceLoggingOn = 1;
const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
if (!file) {
file = "WebRTC.log";
}
CSFLogDebug(logTag, "%s Logging webrtc to %s level %d", __FUNCTION__,
file, logs->level);
mVideoEngine->SetTraceFilter(logs->level);
mVideoEngine->SetTraceFile(file);
}
CSFLogDebug(logTag, "%s Logging webrtc to %s level %d", __FUNCTION__,
file, logs->level);
mVideoEngine->SetTraceFilter(logs->level);
mVideoEngine->SetTraceFile(file);
}
if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
@ -179,69 +225,72 @@ MediaConduitErrorCode WebrtcVideoConduit::Init()
return kMediaConduitSessionNotInited;
}
CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
if (other) {
mChannel = other->mChannel;
mPtrExtCapture = other->mPtrExtCapture;
mCapId = other->mCapId;
} else {
CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
if(mPtrViEBase->Init() == -1)
{
CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitSessionNotInited;
if(mPtrViEBase->Init() == -1)
{
CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitSessionNotInited;
}
if(mPtrViEBase->CreateChannel(mChannel) == -1)
{
CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitChannelError;
}
if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitTransportRegistrationFail;
}
if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
mPtrExtCapture) == -1)
{
CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
__FUNCTION__, mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
{
CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
__FUNCTION__,mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
if(mPtrViERender->AddRenderer(mChannel,
webrtc::kVideoI420,
(webrtc::ExternalRenderer*) this) == -1)
{
CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
return kMediaConduitInvalidRenderer;
}
// Set up some parameters, per juberti. Set MTU.
if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
{
CSFLogError(logTag, "%s MTU Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitMTUError;
}
// Turn on RTCP and loss feedback reporting.
if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
{
CSFLogError(logTag, "%s RTCPStatus Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitRTCPStatusError;
}
}
if(mPtrViEBase->CreateChannel(mChannel) == -1)
{
CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitChannelError;
}
if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
{
CSFLogError(logTag, "%s ViENetwork Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitTransportRegistrationFail;
}
mPtrExtCapture = 0;
if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
mPtrExtCapture) == -1)
{
CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
__FUNCTION__, mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
{
CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
__FUNCTION__,mPtrViEBase->LastError());
return kMediaConduitCaptureError;
}
if(mPtrViERender->AddRenderer(mChannel,
webrtc::kVideoI420,
(webrtc::ExternalRenderer*) this) == -1)
{
CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
return kMediaConduitInvalidRenderer;
}
// Set up some parameters, per juberti. Set MTU.
if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
{
CSFLogError(logTag, "%s MTU Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitMTUError;
}
// Turn on RTCP and loss feedback reporting.
if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
{
CSFLogError(logTag, "%s RTCPStatus Failed %d ", __FUNCTION__,
mPtrViEBase->LastError());
return kMediaConduitRTCPStatusError;
}
CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
return kMediaConduitNoError;
}
@ -251,15 +300,23 @@ WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
{
CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
// SyncTo(value) syncs to the AudioConduit, and if already synced replaces
// the current sync target. SyncTo(nullptr) cancels any existing sync and
// releases the strong ref to AudioConduit.
if (aConduit) {
mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
// NOTE: this means the VideoConduit will keep the AudioConduit alive!
mSyncedTo = aConduit;
} else if (mSyncedTo) {
} else if ((mOtherDirection && mOtherDirection->mSyncedTo) || mSyncedTo) {
mPtrViEBase->DisconnectAudioChannel(mChannel);
mPtrViEBase->SetVoiceEngine(nullptr);
mSyncedTo = nullptr;
}
// Now manage the shared sync reference (ugly)
if (mSyncedTo || !mOtherDirection ) {
mSyncedTo = aConduit;
} else {
mOtherDirection->mSyncedTo = aConduit;
}
}
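For context, the intended call pattern for SyncTo() is unchanged; only the bookkeeping of which conduit of the pair holds the strong reference is new. A hedged sketch of how a caller uses it (the caller-side variable names are hypothetical):
// Pair the video conduit with its audio conduit for A/V sync; the video
// conduit keeps a strong ref to the AudioConduit until the sync is cancelled.
videoConduit->SyncTo(audioConduit);
// ... later, before teardown ...
videoConduit->SyncTo(nullptr);   // disconnect and drop the strong ref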
@ -312,11 +369,10 @@ WebrtcVideoConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTranspo
CSFLogDebug(logTag, "%s ", __FUNCTION__);
if(!aTransport)
{
CSFLogError(logTag, "%s NULL Transport ", __FUNCTION__);
MOZ_ASSERT(PR_FALSE);
CSFLogError(logTag, "%s NULL Transport", __FUNCTION__);
return kMediaConduitInvalidTransport;
}
//Assign the transport
// set the transport
mTransport = aTransport;
return kMediaConduitNoError;
}
@ -360,7 +416,6 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
}
}
//reset the flag
mEngineTransmitting = false;
// we should be good here to set the new codec.
@ -416,7 +471,7 @@ WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
return kMediaConduitUnknownError;
}
//Copy the applied codec for future reference
//Copy the applied config for future reference.
delete mCurSendCodecConfig;
mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
@ -438,6 +493,8 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
bool success = false;
std::string payloadName;
// are we receiving already? If so, stop receiving and playout
// since we can't apply new recv codec when the engine is playing.
if(mEngineReceiving)
{
CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
@ -456,6 +513,8 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
}
}
mEngineReceiving = false;
if(codecConfigList.empty())
{
CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
@ -721,7 +780,6 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width,
return true;
}
MediaConduitErrorCode
WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
unsigned int video_frame_length,
@ -730,7 +788,6 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
VideoType video_type,
uint64_t capture_time)
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
// check the parameters for sanity
@ -772,6 +829,7 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
}
//insert the frame to video engine in I420 format only
MOZ_ASSERT(mPtrExtCapture);
if(mPtrExtCapture->IncomingFrame(video_frame,
video_frame_length,
width, height,
@ -783,7 +841,7 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
return kMediaConduitCaptureError;
}
CSFLogError(logTag, "%s Inserted A Frame", __FUNCTION__);
CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
return kMediaConduitNoError;
}
@ -791,7 +849,7 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
MediaConduitErrorCode
WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
{
CSFLogError(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len);
CSFLogDebug(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len);
// Media Engine should be receiving already.
if(mEngineReceiving)
@ -808,7 +866,7 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
return kMediaConduitRTPRTCPModuleError;
}
} else {
CSFLogError(logTag, "%s Engine Error: Not Receiving !!! ", __FUNCTION__);
CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
@ -818,12 +876,11 @@ WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
MediaConduitErrorCode
WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
{
CSFLogError(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len);
CSFLogDebug(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len);
//Media Engine should be receiving already
if(mEngineTransmitting)
{
//let the engine know of RTCP packet to decode.
if(mPtrViENetwork->ReceivedRTCPPacket(mChannel,data,len) == -1)
{
int error = mPtrViEBase->LastError();
@ -835,7 +892,7 @@ WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
return kMediaConduitRTPRTCPModuleError;
}
} else {
CSFLogError(logTag, "%s: Engine Error: Not Receiving", __FUNCTION__);
CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
return kMediaConduitNoError;
@ -844,32 +901,53 @@ WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
//WebRTC::RTP Callback Implementation
int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len)
{
CSFLogError(logTag, "%s Channel %d, len %d ", __FUNCTION__, channel, len);
CSFLogDebug(logTag, "%s : channel %d len %d %s", __FUNCTION__, channel, len,
(mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");
if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
if (mEngineReceiving)
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s Failed", __FUNCTION__);
if (mOtherDirection)
{
return mOtherDirection->SendPacket(channel, data, len);
}
CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender on channel %d",
__FUNCTION__, channel);
return -1;
} else {
if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
return -1;
}
}
}
int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len)
{
CSFLogError(logTag, "%s : channel %d , len %d ", __FUNCTION__, channel,len);
CSFLogDebug(logTag, "%s : channel %d , len %d ", __FUNCTION__, channel,len);
// can't enable this assertion because we do send RTCP on the receiver side. Suppress it
// NS_ASSERTION(mEngineReceiving,"We shouldn't send RTCP on the receiver side");
if(mEngineReceiving && mTransport && (mTransport->SendRtcpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s Failed", __FUNCTION__);
return -1;
}
if (mEngineTransmitting)
{
if (mOtherDirection)
{
return mOtherDirection->SendRTCPPacket(channel, data, len);
}
}
// We come here if we have only one pipeline/conduit setup,
// such as for unidirectional streams.
// We also end up here if we are receiving
if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
{
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
return len;
} else {
CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
return -1;
}
}
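The routing in SendPacket()/SendRTCPPacket() above boils down to this: RTP must always leave through the transmit-side conduit (so a receive-side conduit forwards to mOtherDirection, or fails if it has none), while RTCP may legitimately originate from either side and so falls through to the local transport when it is not forwarded. A condensed illustration of the RTP decision, using invented types rather than the real classes:
// Illustration only; Conduit here is not the real WebrtcVideoConduit.
struct Conduit {
  bool receiving = false;      // plays the role of mEngineReceiving
  Conduit* other = nullptr;    // plays the role of mOtherDirection
  int SendRtp(const void* data, int len) {
    if (receiving) {
      if (other) {
        return other->SendRtp(data, len);  // hand RTP to the transmit side
      }
      return -1;                           // receive-only conduit: no RTP sender
    }
    return SendOnTransport(data, len);     // transmit side: use own transport
  }
  int SendOnTransport(const void*, int len) { return len; }  // stand-in for mTransport
};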
// WebRTC::ExternalMedia Implementation
@ -927,6 +1005,7 @@ WebrtcVideoConduit::CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo,
cinst.maxBitrate = 2000;
}
//Copy the codec passed into Conduit's database
bool
WebrtcVideoConduit::CopyCodecToDB(const VideoCodecConfig* codecInfo)
{
@ -935,26 +1014,6 @@ WebrtcVideoConduit::CopyCodecToDB(const VideoCodecConfig* codecInfo)
return true;
}
/**
* Checks if the codec is already in Conduit's database
*/
bool
WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const
{
//the db should have at least one codec
for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
{
if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo))
{
//match
return true;
}
}
//no match
return false;
}
bool
WebrtcVideoConduit::CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig,
const VideoCodecConfig* codecInfo) const
@ -975,16 +1034,35 @@ WebrtcVideoConduit::CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig,
return false;
}
/**
* Checks if the codec is already in Conduit's database
*/
bool
WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const
{
//the db should have at least one codec
for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
{
if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo))
{
//match
return true;
}
}
//no match or empty local db
return false;
}
/**
* Perform validation on the codecConfig to be applied
* Verifies if the codec is already applied.
*/
MediaConduitErrorCode
WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo,
bool send) const
bool send) const
{
bool codecAppliedAlready = false;
if(!codecInfo)
{
CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);

View file

@ -43,9 +43,7 @@ class WebrtcVideoConduit:public VideoSessionConduit
,public webrtc::Transport
,public webrtc::ExternalRenderer
{
public:
//VoiceEngine defined constant for Payload Name Size.
static const unsigned int CODEC_PLNAME_SIZE;
@ -98,7 +96,7 @@ public:
const std::vector<VideoCodecConfig* >& codecConfigList);
/**
* Register External Transport to this Conduit. RTP and RTCP frames from the VoiceEnigne
* Register Transport for this Conduit. RTP and RTCP frames from the VideoEngine
* shall be passed to the registered transport for transporting externally.
*/
virtual MediaConduitErrorCode AttachTransport(mozilla::RefPtr<TransportInterface> aTransport);
@ -133,13 +131,13 @@ public:
/**
* Webrtc transport implementation to send and receive RTP packet.
* AudioConduit registers itself as ExternalTransport to the VideoEngine
* VideoConduit registers itself as ExternalTransport to the VideoEngine
*/
virtual int SendPacket(int channel, const void *data, int len) ;
/**
* Webrtc transport implementation to send and receive RTCP packet.
* AudioConduit registers itself as ExternalTransport to the VideoEngine
* VideoConduit registers itself as ExternalTransport to the VideoEngine
*/
virtual int SendRTCPPacket(int channel, const void *data, int len) ;
@ -175,6 +173,8 @@ public:
}
WebrtcVideoConduit():
mOtherDirection(nullptr),
mShutDown(false),
mVideoEngine(nullptr),
mTransport(nullptr),
mRenderer(nullptr),
@ -195,12 +195,12 @@ public:
{
}
virtual ~WebrtcVideoConduit() ;
MediaConduitErrorCode Init(WebrtcVideoConduit *other);
MediaConduitErrorCode Init();
int GetChannel() { return mChannel; }
webrtc::VideoEngine* GetVideoEngine() { return mVideoEngine; }
private:
@ -228,8 +228,17 @@ private:
//Utility function to dump recv codec database
void DumpCodecDB() const;
webrtc::VideoEngine* mVideoEngine;
// The two sides of a send/receive pair of conduits each keep a pointer to the other.
// They also share a single VideoEngine and mChannel. Shutdown must be coordinated
// carefully to avoid double-freeing or accessing after one frees.
WebrtcVideoConduit* mOtherDirection;
// The other side has shut down our mChannel and related items already
bool mShutDown;
// A few of these are shared by both directions. They're released by the last
// conduit to die.
webrtc::VideoEngine* mVideoEngine; // shared
mozilla::RefPtr<TransportInterface> mTransport;
mozilla::RefPtr<VideoRenderer> mRenderer;
@ -238,7 +247,7 @@ private:
webrtc::ViECodec* mPtrViECodec;
webrtc::ViENetwork* mPtrViENetwork;
webrtc::ViERender* mPtrViERender;
webrtc::ViEExternalCapture* mPtrExtCapture;
webrtc::ViEExternalCapture* mPtrExtCapture; // shared
webrtc::ViERTP_RTCP* mPtrRTP;
// Engine state we are concerned with.
@ -255,8 +264,6 @@ private:
mozilla::RefPtr<WebrtcAudioConduit> mSyncedTo;
};
} // end namespace
#endif

View file

@ -1573,11 +1573,14 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
std::vector<mozilla::AudioCodecConfig *> configs;
// Instantiate an appropriate conduit
mozilla::RefPtr<mozilla::AudioSessionConduit> tx_conduit =
mozilla::RefPtr<mozilla::MediaSessionConduit> tx_conduit =
pc.impl()->media()->GetConduit(level, false);
MOZ_ASSERT_IF(tx_conduit, tx_conduit->type() == MediaSessionConduit::AUDIO);
// The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
// and are responsible for cleanly shutting down.
mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
mozilla::AudioSessionConduit::Create(tx_conduit);
mozilla::AudioSessionConduit::Create(static_cast<AudioSessionConduit *>(tx_conduit.get()));
if(!conduit)
return VCM_ERROR;
@ -1625,11 +1628,19 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
std::vector<mozilla::VideoCodecConfig *> configs;
// Instantiate an appropriate conduit
mozilla::RefPtr<mozilla::MediaSessionConduit> tx_conduit =
pc.impl()->media()->GetConduit(level, false);
MOZ_ASSERT_IF(tx_conduit, tx_conduit->type() == MediaSessionConduit::VIDEO);
// The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
// and are responsible for cleanly shutting down.
mozilla::RefPtr<mozilla::VideoSessionConduit> conduit =
mozilla::VideoSessionConduit::Create();
mozilla::VideoSessionConduit::Create(static_cast<VideoSessionConduit *>(tx_conduit.get()));
if(!conduit)
return VCM_ERROR;
pc.impl()->media()->AddConduit(level, true, conduit);
mozilla::VideoCodecConfig *config_raw;
for(int i=0; i <num_payloads; i++)
@ -2232,11 +2243,14 @@ static int vcmTxStartICE_m(cc_mcapid_t mcap_id,
mozilla::ScopedDeletePtr<mozilla::AudioCodecConfig> config(config_raw);
// Instantiate an appropriate conduit
mozilla::RefPtr<mozilla::AudioSessionConduit> rx_conduit =
mozilla::RefPtr<mozilla::MediaSessionConduit> rx_conduit =
pc.impl()->media()->GetConduit(level, true);
MOZ_ASSERT_IF(rx_conduit, rx_conduit->type() == MediaSessionConduit::AUDIO);
// The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
// and are responsible for cleanly shutting down.
mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
mozilla::AudioSessionConduit::Create(rx_conduit);
mozilla::AudioSessionConduit::Create(static_cast<AudioSessionConduit *>(rx_conduit.get()));
if (!conduit || conduit->ConfigureSendMediaCodec(config))
return VCM_ERROR;
@ -2277,13 +2291,21 @@ static int vcmTxStartICE_m(cc_mcapid_t mcap_id,
mozilla::ScopedDeletePtr<mozilla::VideoCodecConfig> config(config_raw);
// Instantiate an appropriate conduit
mozilla::RefPtr<mozilla::MediaSessionConduit> rx_conduit =
pc.impl()->media()->GetConduit(level, true);
MOZ_ASSERT_IF(rx_conduit, rx_conduit->type() == MediaSessionConduit::VIDEO);
// The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
// and are responsible for cleanly shutting down.
mozilla::RefPtr<mozilla::VideoSessionConduit> conduit =
mozilla::VideoSessionConduit::Create();
mozilla::VideoSessionConduit::Create(static_cast<VideoSessionConduit *>(rx_conduit.get()));
// Find the appropriate media conduit config
if (!conduit || conduit->ConfigureSendMediaCodec(config))
return VCM_ERROR;
pc.impl()->media()->AddConduit(level, false, conduit);
// Now we have all the pieces, create the pipeline
mozilla::RefPtr<mozilla::MediaPipeline> pipeline =
new mozilla::MediaPipelineTransmit(

View file

@ -308,29 +308,29 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
// Add a transport flow
void AddTransportFlow(int aIndex, bool aRtcp,
mozilla::RefPtr<mozilla::TransportFlow> aFlow) {
const mozilla::RefPtr<mozilla::TransportFlow> &aFlow) {
int index_inner = aIndex * 2 + (aRtcp ? 1 : 0);
MOZ_ASSERT(!mTransportFlows[index_inner]);
mTransportFlows[index_inner] = aFlow;
}
mozilla::RefPtr<mozilla::AudioSessionConduit> GetConduit(int aStreamIndex, bool aReceive) {
mozilla::RefPtr<mozilla::MediaSessionConduit> GetConduit(int aStreamIndex, bool aReceive) {
int index_inner = aStreamIndex * 2 + (aReceive ? 0 : 1);
if (mAudioConduits.find(index_inner) == mAudioConduits.end())
if (mConduits.find(index_inner) == mConduits.end())
return NULL;
return mAudioConduits[index_inner];
return mConduits[index_inner];
}
// Add a conduit
void AddConduit(int aIndex, bool aReceive,
const mozilla::RefPtr<mozilla::AudioSessionConduit> &aConduit) {
const mozilla::RefPtr<mozilla::MediaSessionConduit> &aConduit) {
int index_inner = aIndex * 2 + (aReceive ? 0 : 1);
MOZ_ASSERT(!mAudioConduits[index_inner]);
mAudioConduits[index_inner] = aConduit;
MOZ_ASSERT(!mConduits[index_inner]);
mConduits[index_inner] = aConduit;
}
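GetConduit() and AddConduit() now store audio and video conduits in the same map, keyed by a flattened index in which even slots hold receive conduits and odd slots hold transmit conduits for a given stream level. Spelled out as a hypothetical helper (not part of the patch):
// index_inner = aStreamIndex * 2 + (aReceive ? 0 : 1)
static int ConduitIndex(int aStreamIndex, bool aReceive) {
  return aStreamIndex * 2 + (aReceive ? 0 : 1);  // even = receive, odd = transmit
}
// e.g. ConduitIndex(1, true) == 2 and ConduitIndex(1, false) == 3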
// ICE state signals
@ -374,7 +374,7 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
// Conduits: even is receive, odd is transmit (for easier correlation with
// flows)
std::map<int, mozilla::RefPtr<mozilla::AudioSessionConduit> > mAudioConduits;
std::map<int, mozilla::RefPtr<mozilla::MediaSessionConduit> > mConduits;
// The main thread.
nsCOMPtr<nsIThread> mMainThread;

View file

@ -548,12 +548,12 @@ class TransportConduitTest : public ::testing::Test
{
int err = 0;
//get pointer to VideoSessionConduit
mVideoSession = mozilla::VideoSessionConduit::Create();
mVideoSession = mozilla::VideoSessionConduit::Create(NULL);
if( !mVideoSession )
ASSERT_NE(mVideoSession, (void*)NULL);
// This session is for other one
mVideoSession2 = mozilla::VideoSessionConduit::Create();
mVideoSession2 = mozilla::VideoSessionConduit::Create(NULL);
if( !mVideoSession2 )
ASSERT_NE(mVideoSession2,(void*)NULL);
@ -622,7 +622,7 @@ class TransportConduitTest : public ::testing::Test
int err = 0;
mozilla::RefPtr<mozilla::VideoSessionConduit> mVideoSession;
//get pointer to VideoSessionConduit
mVideoSession = mozilla::VideoSessionConduit::Create();
mVideoSession = mozilla::VideoSessionConduit::Create(NULL);
if( !mVideoSession )
ASSERT_NE(mVideoSession, (void*)NULL);
@ -726,7 +726,7 @@ class TransportConduitTest : public ::testing::Test
int err = 0;
// Get pointer to VideoSessionConduit.
mVideoSession = mozilla::VideoSessionConduit::Create();
mVideoSession = mozilla::VideoSessionConduit::Create(NULL);
if( !mVideoSession )
ASSERT_NE(mVideoSession, (void*)NULL);