Backout df75a87cce60 & 19e164f7d88d (bug 818670) for build bustage on a CLOSED TREE

Ed Morley 2013-01-29 17:28:30 +00:00
Parent 99fd17666b
Commit 367cd59d9d
16 changed files: 93 additions and 394 deletions

View file

@@ -89,11 +89,6 @@ public:
/* Stop the device and release the corresponding MediaStream */
virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
/* Change device configuration. */
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) = 0;
/* Return false if device is currently allocated or started */
bool IsAvailable() {
if (mState == kAllocated || mState == kStarted) {

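The Config() hook removed in this hunk was the per-source switch for echo cancellation, automatic gain control, and noise suppression. For orientation only, a minimal sketch of how a caller would drive it, using the webrtc mode enums that reappear in later hunks of this backout (the aAudioSource variable is an illustrative name, not from the patch):

    // Hedged sketch, not part of this commit: leave AEC untouched, switch AGC
    // to its default mode, and keep noise suppression off for this source.
    nsresult rv = aAudioSource->Config(false, webrtc::kEcUnchanged,
                                       true,  webrtc::kAgcDefault,
                                       false, webrtc::kNsUnchanged);
    if (NS_FAILED(rv)) {
      // The source keeps whatever audio-processing settings it already had.
    }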
View file

@@ -44,9 +44,6 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
@@ -89,9 +86,6 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,

View file

@@ -35,7 +35,6 @@
#include "voice_engine/include/voe_audio_processing.h"
#include "voice_engine/include/voe_volume_control.h"
#include "voice_engine/include/voe_external_media.h"
#include "voice_engine/include/voe_audio_processing.h"
// Video Engine
#include "video_engine/include/vie_base.h"
@@ -91,9 +90,6 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
@@ -181,10 +177,6 @@ public:
, mCapIndex(aIndex)
, mChannel(-1)
, mInitDone(false)
, mEchoOn(false), mAgcOn(false), mNoiseOn(false)
, mEchoCancel(webrtc::kEcDefault)
, mAGC(webrtc::kAgcDefault)
, mNoiseSuppress(webrtc::kNsDefault)
, mNullTransport(nullptr) {
MOZ_ASSERT(aVoiceEnginePtr);
mState = kReleased;
@@ -202,10 +194,6 @@ public:
virtual nsresult Start(SourceMediaStream*, TrackID);
virtual nsresult Stop(SourceMediaStream*, TrackID);
virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise);
virtual void NotifyPull(MediaStreamGraph* aGraph,
SourceMediaStream *aSource,
TrackID aId,
@@ -230,7 +218,6 @@ private:
webrtc::VoEBase* mVoEBase;
webrtc::VoEExternalMedia* mVoERender;
webrtc::VoENetwork* mVoENetwork;
webrtc::VoEAudioProcessing *mVoEProcessing;
// mMonitor protects mSources[] access/changes, and transitions of mState
// from kStarted to kStopped (which are combined with EndTrack()).
@@ -246,11 +233,6 @@ private:
nsString mDeviceName;
nsString mDeviceUUID;
bool mEchoOn, mAgcOn, mNoiseOn;
webrtc::EcModes mEchoCancel;
webrtc::AgcModes mAGC;
webrtc::NsModes mNoiseSuppress;
NullTransport *mNullTransport;
};

View file

@@ -46,60 +46,6 @@ MediaEngineWebRTCAudioSource::GetUUID(nsAString& aUUID)
return;
}
nsresult
MediaEngineWebRTCAudioSource::Config(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise)
{
LOG(("Audio config: aec: %d, agc: %d, noise: %d",
aEchoOn ? aEcho : -1,
aAgcOn ? aAGC : -1,
aNoiseOn ? aNoise : -1));
bool update_agc = (mAgcOn == aAgcOn);
bool update_noise = (mNoiseOn == aNoiseOn);
mAgcOn = aAgcOn;
mNoiseOn = aNoiseOn;
if ((webrtc::AgcModes) aAGC != webrtc::kAgcUnchanged) {
if (mAGC != (webrtc::AgcModes) aAGC) {
update_agc = true;
mAGC = (webrtc::AgcModes) aAGC;
}
}
if ((webrtc::NsModes) aNoise != webrtc::kNsUnchanged) {
if (mNoiseSuppress != (webrtc::NsModes) aNoise) {
update_noise = true;
mNoiseSuppress = (webrtc::NsModes) aNoise;
}
}
if (mInitDone) {
int error;
#if 0
// Until we can support feeding our full output audio from the browser
// through the MediaStream, this won't work. Or we need to move AEC to
// below audio input and output, perhaps invoked from here.
mEchoOn = aEchoOn;
if ((webrtc::EcModes) aEcho != webrtc::kEcUnchanged)
mEchoCancel = (webrtc::EcModes) aEcho;
mVoEProcessing->SetEcStatus(mEchoOn, aEcho);
#else
(void) aEcho; (void) aEchoOn; // suppress warnings
#endif
if (update_agc &&
0 != (error = mVoEProcessing->SetAgcStatus(mAgcOn, (webrtc::AgcModes) aAGC))) {
LOG(("%s Error setting AGC Status: %d ",__FUNCTION__, error));
}
if (update_noise &&
0 != (error = mVoEProcessing->SetNsStatus(mNoiseOn, (webrtc::NsModes) aNoise))) {
LOG(("%s Error setting NoiseSuppression Status: %d ",__FUNCTION__, error));
}
}
return NS_OK;
}
nsresult
MediaEngineWebRTCAudioSource::Allocate()
{
@@ -160,11 +106,6 @@ MediaEngineWebRTCAudioSource::Start(SourceMediaStream* aStream, TrackID aID)
}
mState = kStarted;
// Configure audio processing in webrtc code
Config(mEchoOn, webrtc::kEcUnchanged,
mAgcOn, webrtc::kAgcUnchanged,
mNoiseOn, webrtc::kNsUnchanged);
if (mVoEBase->StartReceive(mChannel)) {
return NS_ERROR_FAILURE;
}
@@ -251,11 +192,6 @@ MediaEngineWebRTCAudioSource::Init()
return;
}
mVoEProcessing = webrtc::VoEAudioProcessing::GetInterface(mVoiceEngine);
if (!mVoEProcessing) {
return;
}
mChannel = mVoEBase->CreateChannel();
if (mChannel < 0) {
return;

View file

@@ -11,8 +11,6 @@
#include "nsIScriptGlobalObject.h"
#include "nsIPopupWindowManager.h"
#include "nsISupportsArray.h"
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"
// For PR_snprintf
#include "prprf.h"
@@ -384,30 +382,6 @@ public:
mAudioSource, mVideoSource, false));
mediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
nsresult rv;
nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
if (NS_SUCCEEDED(rv)) {
nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
if (branch) {
int32_t aec = (int32_t) webrtc::kEcUnchanged;
int32_t agc = (int32_t) webrtc::kAgcUnchanged;
int32_t noise = (int32_t) webrtc::kNsUnchanged;
bool aec_on = false, agc_on = false, noise_on = false;
branch->GetBoolPref("media.peerconnection.aec_enabled", &aec_on);
branch->GetIntPref("media.peerconnection.aec", &aec);
branch->GetBoolPref("media.peerconnection.agc_enabled", &agc_on);
branch->GetIntPref("media.peerconnection.agc", &agc);
branch->GetBoolPref("media.peerconnection.noise_enabled", &noise_on);
branch->GetIntPref("media.peerconnection.noise", &noise);
mListener->AudioConfig(aec_on, (uint32_t) aec,
agc_on, (uint32_t) agc,
noise_on, (uint32_t) noise);
}
}
// We're in the main thread, so no worries here either.
nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> success(mSuccess);
nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error(mError);

View file

@@ -21,8 +21,6 @@
#include "mozilla/StaticPtr.h"
#include "prlog.h"
#include "mtransport/runnable_utils.h"
namespace mozilla {
#ifdef PR_LOGGING
@@ -130,20 +128,6 @@ public:
// Can be invoked from EITHER MainThread or MSG thread
void Invalidate();
void
AudioConfig(bool aEchoOn, uint32_t aEcho,
bool aAgcOn, uint32_t aAGC,
bool aNoiseOn, uint32_t aNoise)
{
if (mAudioSource) {
RUN_ON_THREAD(mMediaThread,
WrapRunnable(nsRefPtr<MediaEngineSource>(mAudioSource), // threadsafe
&MediaEngineSource::Config,
aEchoOn, aEcho, aAgcOn, aAGC, aNoiseOn, aNoise),
NS_DISPATCH_NORMAL);
}
}
void
Remove()
{

View file

@@ -37,7 +37,6 @@ EXPORTS_mtransport = \
../transportlayerprsock.h \
../m_cpp_utils.h \
../runnable_utils.h \
../runnable_utils_generated.h \
../sigslot.h \
$(NULL)

View file

@@ -3,13 +3,6 @@
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "AudioConduit.h"
#include "nsCOMPtr.h"
#include "mozilla/Services.h"
#include "nsServiceManagerUtils.h"
#include "nsIPrefService.h"
#include "nsIPrefBranch.h"
#include "nsThreadUtils.h"
#include "CSFLog.h"
#include "voice_engine/include/voe_errors.h"
@@ -24,13 +17,11 @@ const unsigned int WebrtcAudioConduit::CODEC_PLNAME_SIZE = 32;
/**
* Factory Method for AudioConduit
*/
mozilla::RefPtr<AudioSessionConduit> AudioSessionConduit::Create(AudioSessionConduit *aOther)
mozilla::RefPtr<AudioSessionConduit> AudioSessionConduit::Create()
{
CSFLogDebug(logTag, "%s ", __FUNCTION__);
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
WebrtcAudioConduit* obj = new WebrtcAudioConduit();
if(obj->Init(static_cast<WebrtcAudioConduit*>(aOther)) != kMediaConduitNoError)
if(obj->Init() != kMediaConduitNoError)
{
CSFLogError(logTag, "%s AudioConduit Init Failed ", __FUNCTION__);
delete obj;
@@ -45,8 +36,6 @@ mozilla::RefPtr<AudioSessionConduit> AudioSessionConduit::Create(AudioSessionCon
*/
WebrtcAudioConduit::~WebrtcAudioConduit()
{
NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
CSFLogDebug(logTag, "%s ", __FUNCTION__);
for(std::vector<AudioCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
{
@@ -55,27 +44,17 @@ WebrtcAudioConduit::~WebrtcAudioConduit()
delete mCurSendCodecConfig;
// The first one of a pair to be deleted shuts down media for both
if(mPtrVoEXmedia)
{
if (!mShutDown) {
mPtrVoEXmedia->SetExternalRecordingStatus(false);
mPtrVoEXmedia->SetExternalPlayoutStatus(false);
}
mPtrVoEXmedia->Release();
}
if(mPtrVoEProcessing)
{
mPtrVoEProcessing->Release();
}
//Deal with the transport
if(mPtrVoENetwork)
{
if (!mShutDown) {
mPtrVoENetwork->DeRegisterExternalTransport(mChannel);
}
mPtrVoENetwork->Release();
}
@@ -86,48 +65,27 @@ WebrtcAudioConduit::~WebrtcAudioConduit()
if(mPtrVoEBase)
{
if (!mShutDown) {
mPtrVoEBase->StopPlayout(mChannel);
mPtrVoEBase->StopSend(mChannel);
mPtrVoEBase->StopReceive(mChannel);
mPtrVoEBase->DeleteChannel(mChannel);
mPtrVoEBase->Terminate();
}
mPtrVoEBase->Release();
}
if (mOtherDirection)
{
// mOtherDirection owns these now!
mOtherDirection->mOtherDirection = NULL;
// let other side we terminated the channel
mOtherDirection->mShutDown = true;
mVoiceEngine = nullptr;
} else {
// only one opener can call Delete. Have it be the last to close.
if(mVoiceEngine)
{
webrtc::VoiceEngine::Delete(mVoiceEngine);
}
}
}
/*
* WebRTCAudioConduit Implementation
*/
MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)
MediaConduitErrorCode WebrtcAudioConduit::Init()
{
CSFLogDebug(logTag, "%s this=%p other=%p", __FUNCTION__, this, other);
CSFLogDebug(logTag, "%s ", __FUNCTION__);
if (other) {
MOZ_ASSERT(!other->mOtherDirection);
other->mOtherDirection = this;
mOtherDirection = other;
// only one can call ::Create()/GetVoiceEngine()
MOZ_ASSERT(other->mVoiceEngine);
mVoiceEngine = other->mVoiceEngine;
} else {
//Per WebRTC APIs below function calls return NULL on failure
if(!(mVoiceEngine = webrtc::VoiceEngine::Create()))
{
@@ -149,7 +107,6 @@ MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)
mVoiceEngine->SetTraceFilter(logs->level);
mVoiceEngine->SetTraceFile(file);
}
}
if(!(mPtrVoEBase = VoEBase::GetInterface(mVoiceEngine)))
{
@@ -169,21 +126,12 @@ MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEProcessing = VoEAudioProcessing::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEProcessing", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if(!(mPtrVoEXmedia = VoEExternalMedia::GetInterface(mVoiceEngine)))
{
CSFLogError(logTag, "%s Unable to initialize VoEExternalMedia", __FUNCTION__);
return kMediaConduitSessionNotInited;
}
if (other) {
mChannel = other->mChannel;
} else {
// init the engine with our audio device layer
if(mPtrVoEBase->Init() == -1)
{
@@ -218,11 +166,11 @@ MediaConduitErrorCode WebrtcAudioConduit::Init(WebrtcAudioConduit *other)
mPtrVoEBase->LastError());
return kMediaConduitExternalRecordingError;
}
CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done (%p)",__FUNCTION__, this);
}
CSFLogDebug(logTag , "%s AudioSessionConduit Initialization Done",__FUNCTION__);
return kMediaConduitNoError;
}
// AudioSessionConduit Implementation
MediaConduitErrorCode
WebrtcAudioConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport)
@@ -287,33 +235,6 @@ WebrtcAudioConduit::ConfigureSendMediaCodec(const AudioCodecConfig* codecConfig)
return kMediaConduitUnknownError;
}
// TEMPORARY - see bug 694814 comment 2
nsresult rv;
nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
if (NS_SUCCEEDED(rv)) {
nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
if (branch) {
int32_t aec = 0; // 0 == unchanged
bool aec_on = false;
branch->GetBoolPref("media.peerconnection.aec_enabled", &aec_on);
branch->GetIntPref("media.peerconnection.aec", &aec);
CSFLogDebug(logTag,"Audio config: aec: %d", aec_on ? aec : -1);
mEchoOn = aec_on;
if (static_cast<webrtc::EcModes>(aec) != webrtc::kEcUnchanged)
mEchoCancel = static_cast<webrtc::EcModes>(aec);
branch->GetIntPref("media.peerconnection.capture_delay", &mCaptureDelay);
}
}
if (0 != (error = mPtrVoEProcessing->SetEcStatus(mEchoOn, mEchoCancel))) {
CSFLogError(logTag,"%s Error setting EVStatus: %d ",__FUNCTION__, error);
return kMediaConduitUnknownError;
}
//Let's Send Transport State-machine on the Engine
if(mPtrVoEBase->StartSend(mChannel) == -1)
{
@@ -483,7 +404,7 @@ WebrtcAudioConduit::SendAudioFrame(const int16_t audio_data[],
return kMediaConduitSessionNotInited;
}
capture_delay = mCaptureDelay;
//Insert the samples
if(mPtrVoEXmedia->ExternalRecordingInsertData(audio_data,
lengthSamples,
@@ -623,19 +544,8 @@ WebrtcAudioConduit::ReceivedRTCPPacket(const void *data, int len)
int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
{
CSFLogDebug(logTag, "%s : channel %d %s",__FUNCTION__,channel,
(mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");
CSFLogDebug(logTag, "%s : channel %d",__FUNCTION__,channel);
if (mEngineReceiving)
{
if (mOtherDirection)
{
return mOtherDirection->SendPacket(channel, data, len);
}
CSFLogDebug(logTag, "%s : Asked to send RTP without an RTP sender",
__FUNCTION__, channel);
return -1;
} else {
if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
{
CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
@@ -644,24 +554,16 @@ int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
return -1;
}
}
}
int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len)
{
CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, channel);
if (mEngineTransmitting)
{
if (mOtherDirection)
{
return mOtherDirection->SendRTCPPacket(channel, data, len);
}
CSFLogDebug(logTag, "%s : Asked to send RTCP without an RTP receiver",
__FUNCTION__, channel);
return -1;
} else {
if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
// can't enable this assertion, because we do. Suppress it
// NS_ASSERTION(mEngineReceiving,"We shouldn't send RTCP on the receiver side");
if(mEngineReceiving && mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
{
CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
return len;
@@ -669,7 +571,7 @@ int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len)
CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
return -1;
}
}
}
/**
@@ -846,3 +748,4 @@ WebrtcAudioConduit::DumpCodecDB() const
}
}
}// end namespace

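Bug 818670 had the receive-side conduit created against its transmit-side twin so both directions shared one VoiceEngine (the mOtherDirection/mShutDown machinery deleted above); this backout restores the standalone factory. A rough sketch of the restored call shape, mirroring the vcm hunks further down (tx_conduit and VCM_ERROR come from those hunks; the surrounding function is assumed):

    // After the backout every conduit owns its own VoiceEngine and channel.
    mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
        mozilla::AudioSessionConduit::Create();
    if (!conduit)
      return VCM_ERROR; // creation or Init() failed
    // The variant removed here paired the directions instead:
    //   mozilla::AudioSessionConduit::Create(tx_conduit);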
View file

@@ -18,14 +18,12 @@
#include "voice_engine/include/voe_file.h"
#include "voice_engine/include/voe_network.h"
#include "voice_engine/include/voe_external_media.h"
#include "voice_engine/include/voe_audio_processing.h"
//Some WebRTC types for short notations
using webrtc::VoEBase;
using webrtc::VoENetwork;
using webrtc::VoECodec;
using webrtc::VoEExternalMedia;
using webrtc::VoEAudioProcessing;
/** This file hosts several structures identifying different aspects
* of a RTP Session.
@@ -143,23 +141,18 @@ public:
WebrtcAudioConduit():
mOtherDirection(NULL),
mShutDown(false),
mVoiceEngine(NULL),
mTransport(NULL),
mEngineTransmitting(false),
mEngineReceiving(false),
mChannel(-1),
mCurSendCodecConfig(NULL),
mCaptureDelay(150),
mEchoOn(true),
mEchoCancel(webrtc::kEcAec)
mCurSendCodecConfig(NULL)
{
}
virtual ~WebrtcAudioConduit();
MediaConduitErrorCode Init(WebrtcAudioConduit *other);
MediaConduitErrorCode Init();
private:
WebrtcAudioConduit(const WebrtcAudioConduit& other) MOZ_DELETE;
@@ -192,19 +185,12 @@ private:
//Utility function to dump recv codec database
void DumpCodecDB() const;
WebrtcAudioConduit* mOtherDirection;
// Other side has shut down our channel and related items already
bool mShutDown;
// These are shared by both directions. They're released by the last
// conduit to die
webrtc::VoiceEngine* mVoiceEngine;
mozilla::RefPtr<TransportInterface> mTransport;
webrtc::VoENetwork* mPtrVoENetwork;
webrtc::VoEBase* mPtrVoEBase;
webrtc::VoECodec* mPtrVoECodec;
webrtc::VoEExternalMedia* mPtrVoEXmedia;
webrtc::VoEAudioProcessing* mPtrVoEProcessing;
//engine states of our interets
bool mEngineTransmitting; // If true => VoiceEngine Send-subsystem is up
@@ -214,12 +200,6 @@ private:
int mChannel;
RecvCodecList mRecvCodecList;
AudioCodecConfig* mCurSendCodecConfig;
// Current "capture" delay (really output plus input delay)
int32_t mCaptureDelay;
bool mEchoOn;
webrtc::EcModes mEchoCancel;
};
} // end namespace

View file

@@ -226,7 +226,7 @@ public:
* return: Concrete VideoSessionConduitObject or NULL in the case
* of failure
*/
static mozilla::RefPtr<AudioSessionConduit> Create(AudioSessionConduit *aOther);
static mozilla::RefPtr<AudioSessionConduit> Create();
virtual ~AudioSessionConduit() {}

View file

@@ -1303,18 +1303,12 @@ static int vcmRxStartICE_m(cc_mcapid_t mcap_id,
if (CC_IS_AUDIO(mcap_id)) {
std::vector<mozilla::AudioCodecConfig *> configs;
// Instantiate an appropriate conduit
mozilla::RefPtr<mozilla::AudioSessionConduit> tx_conduit =
pc.impl()->media()->GetConduit(level, false);
mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
mozilla::AudioSessionConduit::Create(tx_conduit);
mozilla::AudioSessionConduit::Create();
if(!conduit)
return VCM_ERROR;
pc.impl()->media()->AddConduit(level, true, conduit);
mozilla::AudioCodecConfig *config_raw;
for(int i=0; i <num_payloads ; i++)
@@ -1959,17 +1953,12 @@ static int vcmTxStartICE_m(cc_mcapid_t mcap_id,
mozilla::ScopedDeletePtr<mozilla::AudioCodecConfig> config(config_raw);
// Instantiate an appropriate conduit
mozilla::RefPtr<mozilla::AudioSessionConduit> rx_conduit =
pc.impl()->media()->GetConduit(level, true);
mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
mozilla::AudioSessionConduit::Create(rx_conduit);
mozilla::AudioSessionConduit::Create();
if (!conduit || conduit->ConfigureSendMediaCodec(config))
return VCM_ERROR;
pc.impl()->media()->AddConduit(level, false, conduit);
mozilla::RefPtr<mozilla::MediaPipeline> pipeline =
new mozilla::MediaPipelineTransmit(
pc.impl()->GetHandle(),

View file

@@ -293,28 +293,9 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
mozilla::RefPtr<mozilla::TransportFlow> aFlow) {
int index_inner = aIndex * 2 + (aRtcp ? 1 : 0);
MOZ_ASSERT(!mTransportFlows[index_inner]);
mTransportFlows[index_inner] = aFlow;
}
mozilla::RefPtr<mozilla::AudioSessionConduit> GetConduit(int aStreamIndex, bool aReceive) {
int index_inner = aStreamIndex * 2 + (aReceive ? 0 : 1);
if (mAudioConduits.find(index_inner) == mAudioConduits.end())
return NULL;
return mAudioConduits[index_inner];
}
// Add a conduit
void AddConduit(int aIndex, bool aReceive,
const mozilla::RefPtr<mozilla::AudioSessionConduit> &aConduit) {
int index_inner = aIndex * 2 + (aReceive ? 0 : 1);
MOZ_ASSERT(!mAudioConduits[index_inner]);
mAudioConduits[index_inner] = aConduit;
}
// ICE state signals
sigslot::signal1<mozilla::NrIceCtx *> SignalIceGatheringCompleted; // Done gathering
sigslot::signal1<mozilla::NrIceCtx *> SignalIceCompleted; // Done handshaking
@@ -350,10 +331,6 @@ class PeerConnectionMedia : public sigslot::has_slots<> {
// Transport flows: even is RTP, odd is RTCP
std::map<int, mozilla::RefPtr<mozilla::TransportFlow> > mTransportFlows;
// Conduits: even is receive, odd is transmit (for easier correlation with
// flows)
std::map<int, mozilla::RefPtr<mozilla::AudioSessionConduit> > mAudioConduits;
NS_INLINE_DECL_THREADSAFE_REFCOUNTING(PeerConnectionMedia)
};

View file

@@ -490,11 +490,11 @@ class TransportConduitTest : public ::testing::Test
{
//get pointer to AudioSessionConduit
int err=0;
mAudioSession = mozilla::AudioSessionConduit::Create(NULL);
mAudioSession = mozilla::AudioSessionConduit::Create();
if( !mAudioSession )
ASSERT_NE(mAudioSession, (void*)NULL);
mAudioSession2 = mozilla::AudioSessionConduit::Create(NULL);
mAudioSession2 = mozilla::AudioSessionConduit::Create();
if( !mAudioSession2 )
ASSERT_NE(mAudioSession2, (void*)NULL);

View file

@@ -48,7 +48,7 @@ class TestAgent {
audio_prsock_(new TransportLayerPrsock()),
audio_dtls_(new TransportLayerDtls()),
audio_config_(109, "opus", 48000, 480, 1, 64000),
audio_conduit_(mozilla::AudioSessionConduit::Create(NULL)),
audio_conduit_(mozilla::AudioSessionConduit::Create()),
audio_(),
audio_pipeline_(),
video_flow_(new TransportFlow()),

View file

@@ -3,9 +3,6 @@
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# definitions to control what gets built in webrtc
# NOTE!!! if you change something here, due to .gyp files not
# being reprocessed on .gypi changes, run this before building:
# "find . -name '*.gyp' | xargs touch"
{
'variables': {
# basic stuff for everything
@@ -16,8 +13,6 @@
'include_tests': 0,
'use_system_libjpeg': 1,
'use_system_libvpx': 1,
# Creates AEC internal sample dump files in current directory
# 'aec_debug_dump': 1,
# codec enable/disables:
# Note: if you change one here, you must modify shared_libs.mk!

View file

@ -178,15 +178,6 @@ pref("media.gstreamer.enabled", true);
pref("media.navigator.enabled", true);
pref("media.peerconnection.enabled", false);
pref("media.navigator.permission.disabled", false);
// These values (aec, agc, and noice) are from media/webrtc/trunk/webrtc/common_types.h
// kXxxUnchanged = 0, kXxxDefault = 1, and higher values are specific to each
// setting (for Xxx = Ec, Agc, or Ns). Defaults are all set to kXxxDefault here.
pref("media.peerconnection.aec_enabled", true);
pref("media.peerconnection.aec", 1);
pref("media.peerconnection.agc_enabled", false);
pref("media.peerconnection.agc", 1);
pref("media.peerconnection.noise_enabled", false);
pref("media.peerconnection.noise", 1);
#else
#ifdef ANDROID
pref("media.navigator.enabled", true);