Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1198458: Rollup of changes previously applied to media/webrtc/trunk/webrtc and fixes to those rs=jesup r=froyd,jib,bwc,jesup,gcp,sotaro,pkerr,pehrsons
Landing as one rolled-up patch to avoid breaking regression tests, and in keeping with previous WebRTC imports. Broken-out parts that needed review are on the bug.
Parent: 1d652361f7
Commit: 9c83bc8f96
@@ -156,6 +156,7 @@ bool isInIgnoredNamespaceForImplicitCtor(const Decl *decl) {
     name == "__gnu_cxx" ||          // gnu C++ lib
     name == "boost" ||              // boost
     name == "webrtc" ||             // upstream webrtc
+    name == "rtc" ||                // upstream webrtc 'base' package
     name.substr(0, 4) == "icu_" ||  // icu
     name == "google" ||             // protobuf
     name == "google_breakpad" ||    // breakpad
@@ -5,6 +5,8 @@
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.

 gyp_vars = {
+    'lsan': 0,
+    'asan': 0,
     'build_with_mozilla': 1,
     'build_with_chromium': 0,
     'use_official_google_api_keys': 0,
@@ -27,14 +29,14 @@ gyp_vars = {
     'build_libyuv': 0,
     'build_libvpx': 0,
     'build_ssl': 0,
     'build_json': 0,
     'build_icu': 0,
     'build_opus': 0,
     'libyuv_dir': '/media/libyuv',
     'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1,
     # don't use openssl
     'use_openssl': 0,

     # saves 4MB when webrtc_trace is off
     'enable_lazy_trace_alloc': 1 if CONFIG['RELEASE_BUILD'] else 0,

     'use_x11': 1 if CONFIG['MOZ_X11'] else 0,
     'use_glib': 1 if CONFIG['GLIB_LIBS'] else 0,

@@ -63,7 +65,9 @@ gyp_vars = {
     'include_opus': 1,
     'include_g722': 1,
     'include_ilbc': 0,
-    'include_isac': 0,
+    # We turn on ISAC because the AGC uses parts of it, and depend on the
+    # linker to throw away unneeded bits.
+    'include_isac': 1,
     'include_pcm16b': 1,
 }
@@ -301,6 +301,11 @@ if CONFIG['MOZ_WEBRTC']:

     DEFINES['MOZILLA_INTERNAL_API'] = True

+    if CONFIG['OS_TARGET'] == 'WINNT':
+        DEFINES['WEBRTC_WIN'] = True
+    else:
+        DEFINES['WEBRTC_POSIX'] = True
+
 if CONFIG['MOZ_OMX_DECODER']:
     DEFINES['MOZ_OMX_DECODER'] = True
@@ -51,12 +51,18 @@ enum BufferState
 bool
 OMXCodecReservation::ReserveOMXCodec()
 {
-  if (mClient) {
-    // Already tried reservation.
-    return false;
+  if (!mClient) {
+    mClient = new mozilla::MediaSystemResourceClient(mType);
+  } else {
+    if (mOwned) {
+      //CODEC_ERROR("OMX Reservation: (%d) already owned", (int) mType);
+      return true;
+    }
+    //CODEC_ERROR("OMX Reservation: (%d) already NOT owned", (int) mType);
   }
-  mClient = new mozilla::MediaSystemResourceClient(mType);
-  return mClient->AcquireSyncNoWait(); // don't wait if resrouce is not available
+  mOwned = mClient->AcquireSyncNoWait(); // don't wait if resource is not available
+  //CODEC_ERROR("OMX Reservation: (%d) Acquire was %s", (int) mType, mOwned ? "Successful" : "Failed");
+  return mOwned;
 }

 void
@@ -65,7 +71,12 @@ OMXCodecReservation::ReleaseOMXCodec()
   if (!mClient) {
     return;
   }
-  mClient->ReleaseResource();
+  //CODEC_ERROR("OMX Reservation: Releasing resource: (%d) %s", (int) mType, mOwned ? "Owned" : "Not owned");
+  if (mOwned) {
+    mClient->ReleaseResource();
+    mClient = nullptr;
+    mOwned = false;
+  }
 }

 OMXAudioEncoder*
@@ -26,7 +26,7 @@ namespace android {
 class OMXCodecReservation : public RefBase
 {
 public:
-  OMXCodecReservation(bool aEncoder)
+  OMXCodecReservation(bool aEncoder) : mOwned(false)
   {
     mType = aEncoder ? mozilla::MediaSystemResourceType::VIDEO_ENCODER :
                        mozilla::MediaSystemResourceType::VIDEO_DECODER;
@@ -45,6 +45,7 @@ public:

 private:
   mozilla::MediaSystemResourceType mType;
+  bool mOwned; // We already own this resource

   RefPtr<mozilla::MediaSystemResourceClient> mClient;
 };
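Aside: the reservation change above boils down to making ReserveOMXCodec() idempotent by tracking ownership in a flag, so repeated reserve/release calls are harmless. A minimal sketch of that pattern, with a hypothetical Resource type standing in for mozilla::MediaSystemResourceClient:

    #include <memory>

    // Hypothetical stand-in for mozilla::MediaSystemResourceClient.
    struct Resource {
      bool AcquireSyncNoWait() { return true; } // don't block if unavailable
      void ReleaseResource() {}
    };

    class Reservation {
      std::unique_ptr<Resource> mClient;
      bool mOwned = false; // do we currently own the underlying resource?

    public:
      // Safe to call repeatedly: re-acquires only when not already owned.
      bool Reserve() {
        if (!mClient) {
          mClient.reset(new Resource());
        } else if (mOwned) {
          return true; // already owned; nothing to do
        }
        mOwned = mClient->AcquireSyncNoWait();
        return mOwned;
      }

      // Releases only what we actually own, so a failed Reserve() is harmless.
      void Release() {
        if (mClient && mOwned) {
          mClient->ReleaseResource();
          mClient.reset();
          mOwned = false;
        }
      }
    };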
@@ -647,7 +647,7 @@ bool
 CamerasChild::RecvDeliverFrame(const int& capEngine,
                                const int& capId,
                                mozilla::ipc::Shmem&& shmem,
-                               const int& size,
+                               const size_t& size,
                                const uint32_t& time_stamp,
                                const int64_t& ntp_time,
                                const int64_t& render_time)
@@ -85,7 +85,7 @@ public:
   // IPC messages received on the PBackground thread;
   // these are the actual callbacks with data
   virtual bool RecvDeliverFrame(const int&, const int&, mozilla::ipc::Shmem&&,
-                                const int&, const uint32_t&, const int64_t&,
+                                const size_t&, const uint32_t&, const int64_t&,
                                 const int64_t&) override;
   virtual bool RecvFrameSizeChange(const int&, const int&,
                                    const int& w, const int& h) override;
@@ -159,9 +159,9 @@ private:
   Mutex mRequestMutex;
   // Hold to wait for an async response to our calls
   Monitor mReplyMonitor;
-  // Async resposne valid?
+  // Async response valid?
   bool mReceivedReply;
-  // Aynsc reponses data contents;
+  // Async responses data contents;
   bool mReplySuccess;
   int mReplyInteger;
   webrtc::CaptureCapability mReplyCapability;
@@ -16,6 +16,8 @@
 #include "nsThreadUtils.h"
 #include "nsXPCOM.h"

+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+
 #undef LOG
 #undef LOG_ENABLED
 mozilla::LazyLogModule gCamerasParentLog("CamerasParent");
@@ -90,7 +92,7 @@ public:
                int cap_id,
                ShmemBuffer buffer,
                unsigned char* altbuffer,
-               int size,
+               size_t size,
                uint32_t time_stamp,
                int64_t ntp_time,
                int64_t render_time)
@@ -136,7 +138,7 @@ private:
   int mCapId;
   ShmemBuffer mBuffer;
   mozilla::UniquePtr<unsigned char[]> mAlternateBuffer;
-  int mSize;
+  size_t mSize;
   uint32_t mTimeStamp;
   int64_t mNtpTime;
   int64_t mRenderTime;
@@ -232,7 +234,7 @@ CamerasParent::DeliverFrameOverIPC(CaptureEngine cap_engine,
                                    int cap_id,
                                    ShmemBuffer buffer,
                                    unsigned char* altbuffer,
-                                   int size,
+                                   size_t size,
                                    uint32_t time_stamp,
                                    int64_t ntp_time,
                                    int64_t render_time)
@@ -281,7 +283,7 @@ CamerasParent::GetBuffer(size_t aSize)

 int
 CallbackHelper::DeliverFrame(unsigned char* buffer,
-                             int size,
+                             size_t size,
                              uint32_t time_stamp,
                              int64_t ntp_time,
                              int64_t render_time,
@@ -310,6 +312,17 @@ CallbackHelper::DeliverFrame(unsigned char* buffer,
   thread->Dispatch(runnable, NS_DISPATCH_NORMAL);
   return 0;
 }

+// XXX!!! FIX THIS -- we should move to pure DeliverI420Frame
+int
+CallbackHelper::DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame)
+{
+  return DeliverFrame(const_cast<uint8_t*>(webrtc_frame.buffer(webrtc::kYPlane)),
+                      CalcBufferSize(webrtc::kI420, webrtc_frame.width(), webrtc_frame.height()),
+                      webrtc_frame.timestamp(),
+                      webrtc_frame.ntp_time_ms(),
+                      webrtc_frame.render_time_ms(),
+                      (void*) webrtc_frame.native_handle());
+}
+
 bool
 CamerasParent::RecvReleaseFrame(mozilla::ipc::Shmem&& s) {
@@ -41,11 +41,12 @@ public:
   virtual int FrameSizeChange(unsigned int w, unsigned int h,
                               unsigned int streams) override;
   virtual int DeliverFrame(unsigned char* buffer,
-                           int size,
+                           size_t size,
                            uint32_t time_stamp,
                            int64_t ntp_time,
                            int64_t render_time,
                            void *handle) override;
+  virtual int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) override;
   virtual bool IsTextureSupported() override { return false; };

   friend CamerasParent;
@@ -108,7 +109,7 @@ public:
                            int cap_id,
                            ShmemBuffer buffer,
                            unsigned char* altbuffer,
-                           int size,
+                           size_t size,
                            uint32_t time_stamp,
                            int64_t ntp_time,
                            int64_t render_time);
@@ -170,14 +170,6 @@ LoadManagerSingleton::AddObserver(webrtc::CPULoadStateObserver * aObserver)
   LOG(("LoadManager - Adding Observer"));
   MutexAutoLock lock(mLock);
   mObservers.AppendElement(aObserver);
-  if (mObservers.Length() == 1) {
-    if (!mLoadMonitor) {
-      mLoadMonitor = new LoadMonitor(mLoadMeasurementInterval);
-      mLoadMonitor->Init(mLoadMonitor);
-      mLoadMonitor->SetLoadChangeCallback(this);
-      mLastStateChange = TimeStamp::Now();
-    }
-  }
 }

 void
@@ -10,6 +10,7 @@
 #include "nsAutoPtr.h"
 #include "nsThreadUtils.h"
 #include "nsIAsyncShutdown.h"
+#include "base/task.h"

 namespace mozilla {
 namespace media {
@@ -27,7 +27,7 @@ child:
   async FrameSizeChange(int capEngine, int cap_id, int w, int h);
   // transfers ownership of |buffer| from parent to child
   async DeliverFrame(int capEngine, int cap_id,
-                     Shmem buffer, int size, uint32_t time_stamp,
+                     Shmem buffer, size_t size, uint32_t time_stamp,
                      int64_t ntp_time, int64_t render_time);
   async ReplyNumberOfCaptureDevices(int numdev);
   async ReplyNumberOfCapabilities(int numdev);
@@ -26,6 +26,11 @@ if CONFIG['MOZ_WEBRTC']:
         '/media/webrtc/signaling',
         '/media/webrtc/trunk',
     ]
+    if CONFIG['OS_TARGET'] == 'WINNT':
+        DEFINES['WEBRTC_WIN'] = True
+    else:
+        DEFINES['WEBRTC_POSIX'] = True
+

 if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gonk'):
     EXPORTS += [
@@ -135,8 +135,8 @@ skip-if = toolkit == 'gonk' || buildapp == 'mulet' # b2g(Bug 960442, video support for WebRTC is disabled on b2g)
 skip-if = toolkit == 'gonk' || buildapp == 'mulet' || (android_version == '18' && debug) # b2g(Bug 960442, video support for WebRTC is disabled on b2g), android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_promiseSendOnly.html]
 skip-if = toolkit == 'gonk' || buildapp == 'mulet' || (android_version == '18' && debug) # b2g(Bug 960442, video support for WebRTC is disabled on b2g), android(Bug 1189784, timeouts on 4.3 emulator)
-[test_peerConnection_relayOnly.html]
-skip-if = toolkit == 'gonk' || buildapp == 'mulet' # b2g(Bug 960442, video support for WebRTC is disabled on b2g)
+#[test_peerConnection_relayOnly.html]
+#skip-if = toolkit == 'gonk' || buildapp == 'mulet' # b2g(Bug 960442, video support for WebRTC is disabled on b2g)
 [test_peerConnection_callbacks.html]
 skip-if = toolkit == 'gonk' || buildapp == 'mulet' || (android_version == '18' && debug) # b2g(Bug 960442, video support for WebRTC is disabled on b2g), android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_replaceTrack.html]
@@ -276,7 +276,7 @@ MediaEngineRemoteVideoSource::FrameSizeChange(unsigned int w, unsigned int h,

 int
 MediaEngineRemoteVideoSource::DeliverFrame(unsigned char* buffer,
-                                           int size,
+                                           size_t size,
                                            uint32_t time_stamp,
                                            int64_t ntp_time,
                                            int64_t render_time,
@@ -288,7 +288,7 @@ MediaEngineRemoteVideoSource::DeliverFrame(unsigned char* buffer,
     return 0;
   }

-  if (mWidth*mHeight + 2*(((mWidth+1)/2)*((mHeight+1)/2)) != size) {
+  if ((size_t) (mWidth*mHeight + 2*(((mWidth+1)/2)*((mHeight+1)/2))) != size) {
     MOZ_ASSERT(false, "Wrong size frame in DeliverFrame!");
     return 0;
   }
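Aside: the size check above uses the standard I420 layout, a full-resolution Y plane plus two chroma planes subsampled 2x2, with odd dimensions rounded up. A small self-contained illustration of that arithmetic (not Mozilla code):

    #include <cstddef>
    #include <cstdio>

    // Bytes in an I420 (YUV 4:2:0) frame: Y is w*h, U and V are each
    // ceil(w/2) * ceil(h/2).
    static size_t I420BufferSize(size_t w, size_t h) {
      return w * h + 2 * (((w + 1) / 2) * ((h + 1) / 2));
    }

    int main() {
      // 640x480 -> 460800; odd sizes round the chroma planes up: 5x5 -> 43.
      printf("%zu %zu\n", I420BufferSize(640, 480), I420BufferSize(5, 5));
      return 0;
    }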
@@ -38,6 +38,10 @@

 #include "NullTransport.h"

+namespace webrtc {
+class I420VideoFrame;
+}
+
 namespace mozilla {

 /**
@@ -53,11 +57,13 @@ public:
   virtual int FrameSizeChange(unsigned int w, unsigned int h,
                               unsigned int streams) override;
   virtual int DeliverFrame(unsigned char* buffer,
-                           int size,
+                           size_t size,
                            uint32_t time_stamp,
                            int64_t ntp_time,
                            int64_t render_time,
                            void *handle) override;
+  // XXX!!!! FIX THIS
+  virtual int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) override { return 0; };
   virtual bool IsTextureSupported() override { return false; };

   // MediaEngineCameraVideoSource
@@ -251,7 +251,7 @@ MediaEngineWebRTC::EnumerateAudioDevices(dom::MediaSourceEnum aMediaSource,
   JNIEnv* const env = jni::GetEnvForThread();
   MOZ_ALWAYS_TRUE(!env->GetJavaVM(&jvm));

-  if (webrtc::VoiceEngine::SetAndroidObjects(jvm, env, (void*)context) != 0) {
+  if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
     LOG(("VoiceEngine:SetAndroidObjects Failed"));
     return;
   }
@@ -20,6 +20,10 @@ EXPORTS += [
 ]

 if CONFIG['MOZ_WEBRTC']:
+    if CONFIG['OS_TARGET'] == 'WINNT':
+        DEFINES['WEBRTC_WIN'] = True
+    else:
+        DEFINES['WEBRTC_POSIX'] = True
     EXPORTS += ['AudioOutputObserver.h',
                 'MediaEngineRemoteVideoSource.h',
                 'MediaEngineWebRTC.h']
@@ -39,6 +39,7 @@ EXPORTS.mozilla.ipc += [
 ]

 if CONFIG['OS_ARCH'] == 'WINNT':
+    DEFINES['WEBRTC_WIN'] = True
     EXPORTS.mozilla.ipc += [
         'Transport_win.h',
     ]
@@ -48,6 +49,7 @@ if CONFIG['OS_ARCH'] == 'WINNT':
         'WindowsMessageLoop.cpp',
     ]
 else:
+    DEFINES['WEBRTC_POSIX'] = True
     EXPORTS.mozilla.ipc += [
         'Transport_posix.h',
     ]
@@ -175,7 +175,8 @@ private:
   ~SingletonThreadHolder()
   {
     r_log(LOG_GENERIC,LOG_DEBUG,"Deleting SingletonThreadHolder");
-    if (NS_WARN_IF(mThread)) {
+    MOZ_ASSERT(!mThread, "SingletonThreads should be Released and shut down before exit!");
+    if (mThread) {
       mThread->Shutdown();
       mThread = nullptr;
     }
@@ -184,7 +185,7 @@ private:
   DISALLOW_COPY_ASSIGN(SingletonThreadHolder);

 public:
-  // Must be threadsafe for ClearOnShutdown
+  // Must be threadsafe for StaticRefPtr/ClearOnShutdown
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(SingletonThreadHolder)

   explicit SingletonThreadHolder(const nsCSubstring& aName)
@@ -230,7 +231,8 @@ public:
               mThread.get());
       mThread->Shutdown();
       mThread = nullptr;
-      // It'd be nice to use a timer instead...
+      // It'd be nice to use a timer instead... But be careful of
+      // xpcom-shutdown-threads in that case
     }
     r_log(LOG_GENERIC,LOG_DEBUG,"ReleaseUse: %lu", (unsigned long) count);
     return count;
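Aside: the SingletonThreadHolder being patched is a use-counted wrapper that lazily creates a shared thread and shuts it down when the last user releases it. A rough sketch of the idea in standard C++ (the real class uses XPCOM's nsIThread plus threadsafe refcounting, so this is an analogy, not the actual implementation):

    #include <cassert>
    #include <memory>
    #include <thread> // placeholder for the real nsIThread

    class SingletonThreadHolder {
      size_t mUseCount = 0;
      std::unique_ptr<std::thread> mThread;

    public:
      ~SingletonThreadHolder() {
        // Mirrors the patch above: assert rather than silently warn.
        assert(!mThread && "should be Released and shut down before exit!");
      }

      void AddUse() {
        if (mUseCount++ == 0) {
          mThread.reset(new std::thread([] { /* run event loop */ }));
        }
      }

      void ReleaseUse() {
        assert(mUseCount > 0);
        if (--mUseCount == 0) {
          // Last user gone: shut the thread down now. Deferring this with
          // a timer would be nicer, but then shutdown ordering (the
          // xpcom-shutdown-threads phase) has to be handled carefully.
          mThread->join();
          mThread.reset();
        }
      }
    };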
@@ -13,7 +13,10 @@ webrtc_non_unified_sources = [
     'trunk/webrtc/modules/audio_coding/codecs/g722/g722_encode.c', # Because of name clash in the saturate function
     'trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c', # Because of name clash in the kDampFilter variable
+    'trunk/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c', # Because of name clash in the kDampFilter variable
+    'trunk/webrtc/modules/audio_coding/main/acm2/codec_manager.cc', # Because of duplicate IsCodecRED/etc
+    'trunk/webrtc/modules/audio_coding/neteq/audio_vector.cc', # Because of explicit template specializations
     'trunk/webrtc/modules/audio_device/android/audio_record_jni.cc', # Because of commonly named module static vars
     'trunk/webrtc/modules/audio_device/android/audio_track_jni.cc', # Because of commonly named module static vars
     'trunk/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc', # Because of LATE()
     'trunk/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc', # Because of LATE()
     'trunk/webrtc/modules/audio_device/opensl/opensles_input.cc', # Because of name clash in the kOption variable
@@ -23,17 +26,23 @@ webrtc_non_unified_sources = [
     'trunk/webrtc/modules/audio_processing/aec/aec_core.c', # Because of name clash in the ComfortNoise function
     'trunk/webrtc/modules/audio_processing/aecm/aecm_core.c', # Because of name clash in the ComfortNoise function
     'trunk/webrtc/modules/audio_processing/aecm/echo_control_mobile.c', # Because of name clash in the kInitCheck variable
     'trunk/webrtc/modules/audio_processing/agc/analog_agc.c', # Because of name clash in the kInitCheck variable
     'trunk/webrtc/modules/audio_processing/agc/histogram.cc', # Because of duplicate definition of static consts with pitch_based_vad.cc
     'trunk/webrtc/modules/audio_processing/agc/legacy/analog_agc.c', # Because of name clash in the kInitCheck variable
     'trunk/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc', # Because of needing to define _USE_MATH_DEFINES before including <cmath>
     'trunk/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc', # Because of needing to define _USE_MATH_DEFINES before including <cmath>
     'trunk/webrtc/modules/audio_processing/echo_cancellation_impl.cc', # Because of name clash in the MapError function
     'trunk/webrtc/modules/audio_processing/echo_control_mobile_impl.cc', # Because of name clash in the MapError function
     'trunk/webrtc/modules/audio_processing/gain_control_impl.cc', # Because of name clash in the Handle typedef
     'trunk/webrtc/modules/audio_processing/high_pass_filter_impl.cc', # Because of name clash in the Handle typedef
     'trunk/webrtc/modules/audio_processing/noise_suppression_impl.cc', # Because of name clash in the Handle typedef
     'trunk/webrtc/modules/remote_bitrate_estimator/mimd_rate_control.cc', # Because of duplicate definitions of static consts against aimd_rate_control.cc
     'trunk/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc', # Because of duplicate definitions of static consts against remote_bitrate_estimator_abs_send_time.cc
     'trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm', # Because of name clash in the nsAutoreleasePool class
     'trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm', # Because of name clash in the nsAutoreleasePool class
     'trunk/webrtc/modules/video_capture/windows/device_info_ds.cc', # Because of the MEDIASUBTYPE_HDYC variable
     'trunk/webrtc/modules/video_capture/windows/help_functions_ds.cc', # Because of initguid.h
     'trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc', # Because of the MEDIASUBTYPE_HDYC variable and initguid.h
     'trunk/webrtc/video_engine/overuse_frame_detector.cc', # Because of name clash with call_stats.cc on kWeightFactor
 ]

 GYP_DIRS += ['trunk']
@@ -329,6 +329,7 @@
 'defines': [
   'OS_WIN',
   'SIP_OS_WINDOWS',
+  'WEBRTC_WIN',
   'WIN32',
   'GIPS_VER=3480',
   'SIPCC_BUILD',
@@ -18,13 +18,13 @@ namespace mozilla {
 class NullTransport : public webrtc::Transport
 {
 public:
-  virtual int SendPacket(int channel, const void *data, int len)
+  virtual int SendPacket(int channel, const void *data, size_t len)
   {
     (void) channel; (void) data;
     return len;
   }

-  virtual int SendRTCPPacket(int channel, const void *data, int len)
+  virtual int SendRTCPPacket(int channel, const void *data, size_t len)
   {
     (void) channel; (void) data;
     return len;
@@ -223,12 +223,10 @@ MediaConduitErrorCode WebrtcAudioConduit::Init()

 #ifdef MOZ_WIDGET_ANDROID
   jobject context = jsjni_GetGlobalContextRef();

   // get the JVM
   JavaVM *jvm = jsjni_GetVM();
-  JNIEnv* jenv = jsjni_GetJNIForThread();

-  if (webrtc::VoiceEngine::SetAndroidObjects(jvm, jenv, (void*)context) != 0) {
+  if (webrtc::VoiceEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
     CSFLogError(logTag, "%s Unable to set Android objects", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
@@ -839,7 +837,7 @@ WebrtcAudioConduit::StartReceiving()

 //WebRTC::RTP Callback Implementation
 // Called on AudioGUM or MSG thread
-int WebrtcAudioConduit::SendPacket(int channel, const void* data, int len)
+int WebrtcAudioConduit::SendPacket(int channel, const void* data, size_t len)
 {
   CSFLogDebug(logTag, "%s : channel %d", __FUNCTION__, channel);
@@ -868,12 +866,12 @@ int WebrtcAudioConduit::SendPacket(int channel, const void* data, size_t len)
 }

 // Called on WebRTC Process thread and perhaps others
-int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, int len)
+int WebrtcAudioConduit::SendRTCPPacket(int channel, const void* data, size_t len)
 {
-  CSFLogDebug(logTag, "%s : channel %d , len %d, first rtcp = %u ",
+  CSFLogDebug(logTag, "%s : channel %d , len %lu, first rtcp = %u ",
               __FUNCTION__,
               channel,
-              len,
+              (unsigned long) len,
               static_cast<unsigned>(((uint8_t *) data)[1]));

   // We come here if we have only one pipeline/conduit setup,
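Aside: most of the logging tweaks in this patch follow from the int to size_t change: %d no longer matches the argument, and since the tree could not rely on %zu everywhere at the time, values are cast to unsigned long and printed with %lu. Both portable options, shown standalone:

    #include <cstdio>

    int main() {
      size_t len = 1500;
      // C99/C++11 way: the dedicated size_t length modifier.
      printf("len %zu\n", len);
      // The style used in this patch: cast so the argument matches %lu
      // on every platform (size_t may be 32- or 64-bit).
      printf("len %lu\n", (unsigned long) len);
      return 0;
    }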
@@ -150,13 +150,13 @@ public:
    * Webrtc transport implementation to send and receive RTP packet.
    * AudioConduit registers itself as ExternalTransport to the VoiceEngine
    */
-  virtual int SendPacket(int channel, const void *data, int len) override;
+  virtual int SendPacket(int channel, const void *data, size_t len) override;

   /**
    * Webrtc transport implementation to send and receive RTCP packet.
    * AudioConduit registers itself as ExternalTransport to the VoiceEngine
    */
-  virtual int SendRTCPPacket(int channel, const void *data, int len) override;
+  virtual int SendRTCPPacket(int channel, const void *data, size_t len) override;


  virtual uint64_t CodecPluginID() override { return 0; }
@@ -16,6 +16,9 @@
 #include "ImageContainer.h"

 #include "webrtc/common_types.h"
+namespace webrtc {
+class I420VideoFrame;
+}

 #include <vector>

@@ -106,7 +109,14 @@ public:
    * inside until it's no longer needed.
    */
   virtual void RenderVideoFrame(const unsigned char* buffer,
-                                unsigned int buffer_size,
+                                size_t buffer_size,
                                 uint32_t time_stamp,
                                 int64_t render_time,
                                 const ImageHandle& handle) = 0;
+  virtual void RenderVideoFrame(const unsigned char* buffer,
+                                size_t buffer_size,
+                                uint32_t y_stride,
+                                uint32_t cbcr_stride,
+                                uint32_t time_stamp,
+                                int64_t render_time,
+                                const ImageHandle& handle) = 0;
@@ -300,6 +310,7 @@ public:
                                                 unsigned short height,
                                                 VideoType video_type,
                                                 uint64_t capture_time) = 0;
+  virtual MediaConduitErrorCode SendVideoFrame(webrtc::I420VideoFrame& frame) = 0;

   virtual MediaConduitErrorCode ConfigureCodecMode(webrtc::VideoCodecMode) = 0;
   /**
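Aside: the forward declaration added above is the usual trick for keeping webrtc's heavy headers out of a widely-included interface: pointers and references to an incomplete type are fine in declarations, and only the .cpp needs the full definition. A minimal sketch (VideoSink is a made-up class for illustration):

    // Header: no webrtc includes needed for by-reference parameters.
    namespace webrtc {
    class I420VideoFrame; // incomplete type is enough here
    }

    class VideoSink {
    public:
      virtual ~VideoSink() = default;
      virtual int DeliverI420Frame(const webrtc::I420VideoFrame& frame) = 0;
    };
    // The implementation file would #include the real webrtc header
    // before touching the frame's members.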
@@ -14,9 +14,11 @@
 #include "nsServiceManagerUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
+#include "mozilla/media/MediaUtils.h"

 #include "webrtc/common_types.h"
+#include "webrtc/common_video/interface/native_handle.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/video_engine/include/vie_errors.h"
 #include "browser_logging/WebRtcLog.h"
@@ -75,6 +77,9 @@ WebrtcVideoConduit::WebrtcVideoConduit():
   mChannel(-1),
   mCapId(-1),
   mCodecMutex("VideoConduit codec db"),
+  mInReconfig(false),
+  mLastWidth(0), // forces a check for reconfig at start
+  mLastHeight(0),
   mSendingWidth(0),
   mSendingHeight(0),
   mReceivingWidth(640),
@@ -88,76 +93,16 @@ WebrtcVideoConduit::WebrtcVideoConduit():
   mStartBitrate(300),
   mMaxBitrate(2000),
   mCodecMode(webrtc::kRealtimeVideo)
-{
-}
+{}

 WebrtcVideoConduit::~WebrtcVideoConduit()
 {
-  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   CSFLogDebug(logTag, "%s ", __FUNCTION__);

-  for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
-  {
-    delete mRecvCodecList[i];
-  }
-
-  // The first one of a pair to be deleted shuts down media for both
-  //Deal with External Capturer
-  if(mPtrViECapture)
-  {
-    mPtrViECapture->DisconnectCaptureDevice(mCapId);
-    mPtrViECapture->ReleaseCaptureDevice(mCapId);
-    mPtrExtCapture = nullptr;
-  }
-
-  if (mPtrExtCodec) {
-    mPtrExtCodec->Release();
-    mPtrExtCodec = NULL;
-  }
-
-  //Deal with External Renderer
-  if(mPtrViERender)
-  {
-    if(mRenderer) {
-      mPtrViERender->StopRender(mChannel);
-    }
-    mPtrViERender->RemoveRenderer(mChannel);
-  }
-
-  //Deal with the transport
-  if(mPtrViENetwork)
-  {
-    mPtrViENetwork->DeregisterSendTransport(mChannel);
-  }
-
-  if(mPtrViEBase)
-  {
-    mPtrViEBase->StopSend(mChannel);
-    mPtrViEBase->StopReceive(mChannel);
-    SyncTo(nullptr);
-    mPtrViEBase->DeleteChannel(mChannel);
-  }
-
-  // mVideoCodecStat has a back-ptr to mPtrViECodec that must be released first
-  if (mVideoCodecStat) {
-    mVideoCodecStat->EndOfCallStats();
-  }
-  mVideoCodecStat = nullptr;
-  // We can't delete the VideoEngine until all these are released!
-  // And we can't use a Scoped ptr, since the order is arbitrary
-  mPtrViEBase = nullptr;
-  mPtrViECapture = nullptr;
-  mPtrViECodec = nullptr;
-  mPtrViENetwork = nullptr;
-  mPtrViERender = nullptr;
-  mPtrRTP = nullptr;
-  mPtrExtCodec = nullptr;
-
-  // only one opener can call Delete. Have it be the last to close.
-  if(mVideoEngine)
-  {
-    webrtc::VideoEngine::Delete(mVideoEngine);
-  }
+  // Release AudioConduit first by dropping reference on MainThread, where it expects to be
+  SyncTo(nullptr);
+  Destroy();
 }

 bool WebrtcVideoConduit::SetLocalSSRC(unsigned int ssrc)
@@ -236,7 +181,7 @@ bool WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean,
             (mLastFramerateTenths/10.0), *framerateMean);
     MutexAutoLock lock(mCodecMutex);
     mLastFramerateTenths = *framerateMean * 10;
-    SelectSendResolution(mSendingWidth, mSendingHeight);
+    SelectSendResolution(mSendingWidth, mSendingHeight, nullptr);
   }
   return true;
 }
@@ -267,7 +212,7 @@ bool WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
                                      unsigned int* cumulativeLost) {
   unsigned short fractionLost;
   unsigned extendedMax;
-  int rttMs;
+  int64_t rttMs;
   // GetReceivedRTCPStatistics is a poorly named GetRTPStatistics variant
   return !mPtrRTP->GetReceivedRTCPStatistics(mChannel, fractionLost,
                                              *cumulativeLost,
@@ -311,14 +256,9 @@ bool WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
   return result;
 }

-/**
- * Performs initialization of the MANDATORY components of the Video Engine
- */
 MediaConduitErrorCode
-WebrtcVideoConduit::Init()
+WebrtcVideoConduit::InitMain()
 {
   CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);

 #if defined(MOZILLA_INTERNAL_API) && !defined(MOZILLA_XPCOMRT_API)
   // already know we must be on MainThread barring unit test weirdness
   MOZ_ASSERT(NS_IsMainThread());
@@ -352,8 +292,8 @@ WebrtcVideoConduit::InitMain()
       }
     }
   }
 #endif

+  EnableWebRtcLog();
 #ifdef MOZ_WIDGET_ANDROID
   // get the JVM
   JavaVM *jvm = jsjni_GetVM();
@@ -363,6 +303,24 @@ WebrtcVideoConduit::InitMain()
     return kMediaConduitSessionNotInited;
   }
 #endif
+#endif
+  return kMediaConduitNoError;
+}
+
+/**
+ * Performs initialization of the MANDATORY components of the Video Engine
+ */
+MediaConduitErrorCode
+WebrtcVideoConduit::Init()
+{
+  CSFLogDebug(logTag, "%s this=%p", __FUNCTION__, this);
+  MediaConduitErrorCode result;
+  // Run code that must run on MainThread first
+  MOZ_ASSERT(NS_IsMainThread());
+  result = InitMain();
+  if (result != kMediaConduitNoError) {
+    return result;
+  }

   // Per WebRTC APIs below function calls return nullptr on failure
   mVideoEngine = webrtc::VideoEngine::Create();
@@ -372,8 +330,6 @@ WebrtcVideoConduit::Init()
     return kMediaConduitSessionNotInited;
   }

-  EnableWebRtcLog();
-
   if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
   {
     CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__);
@@ -492,6 +448,72 @@ WebrtcVideoConduit::Init()
   return kMediaConduitNoError;
 }

+void
+WebrtcVideoConduit::Destroy()
+{
+  for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
+  {
+    delete mRecvCodecList[i];
+  }
+
+  // The first one of a pair to be deleted shuts down media for both
+  //Deal with External Capturer
+  if(mPtrViECapture)
+  {
+    mPtrViECapture->DisconnectCaptureDevice(mCapId);
+    mPtrViECapture->ReleaseCaptureDevice(mCapId);
+    mPtrExtCapture = nullptr;
+  }
+
+  if (mPtrExtCodec) {
+    mPtrExtCodec->Release();
+    mPtrExtCodec = NULL;
+  }
+
+  //Deal with External Renderer
+  if(mPtrViERender)
+  {
+    if(mRenderer) {
+      mPtrViERender->StopRender(mChannel);
+    }
+    mPtrViERender->RemoveRenderer(mChannel);
+  }
+
+  //Deal with the transport
+  if(mPtrViENetwork)
+  {
+    mPtrViENetwork->DeregisterSendTransport(mChannel);
+  }
+
+  if(mPtrViEBase)
+  {
+    mPtrViEBase->StopSend(mChannel);
+    mPtrViEBase->StopReceive(mChannel);
+    mPtrViEBase->DeleteChannel(mChannel);
+  }
+
+  // mVideoCodecStat has a back-ptr to mPtrViECodec that must be released first
+  if (mVideoCodecStat) {
+    mVideoCodecStat->EndOfCallStats();
+  }
+  mVideoCodecStat = nullptr;
+  // We can't delete the VideoEngine until all these are released!
+  // And we can't use a Scoped ptr, since the order is arbitrary
+  mPtrViEBase = nullptr;
+  mPtrViECapture = nullptr;
+  mPtrViECodec = nullptr;
+  mPtrViENetwork = nullptr;
+  mPtrViERender = nullptr;
+  mPtrRTP = nullptr;
+  mPtrExtCodec = nullptr;
+
+  // only one opener can call Delete. Have it be the last to close.
+  if(mVideoEngine)
+  {
+    webrtc::VideoEngine::Delete(mVideoEngine);
+  }
+}
+
 void
 WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
 {
@@ -504,6 +526,9 @@ WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
     mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
     mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
     // NOTE: this means the VideoConduit will keep the AudioConduit alive!
+  } else {
+    mPtrViEBase->DisconnectAudioChannel(mChannel);
+    mPtrViEBase->SetVoiceEngine(nullptr);
   }

   mSyncedTo = aConduit;
@@ -518,7 +543,7 @@ WebrtcVideoConduit::AttachRenderer(RefPtr<VideoRenderer> aVideoRenderer)
   if(!aVideoRenderer)
   {
     CSFLogError(logTag, "%s NULL Renderer", __FUNCTION__);
-    MOZ_ASSERT(PR_FALSE);
+    MOZ_ASSERT(false);
     return kMediaConduitInvalidRenderer;
   }
@@ -895,7 +920,7 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
       mFrameRequestMethod = FrameRequestFir;
       break;
     default:
-      MOZ_ASSERT(PR_FALSE);
+      MOZ_ASSERT(false);
       mFrameRequestMethod = FrameRequestUnknown;
   }
@@ -932,10 +957,11 @@ WebrtcVideoConduit::ConfigureRecvMediaCodecs(
   return kMediaConduitNoError;
 }

-void
-WebrtcVideoConduit::SelectBandwidth(webrtc::VideoCodec& vie_codec,
-                                    unsigned short width,
-                                    unsigned short height)
+static void
+SelectBandwidth(webrtc::VideoCodec& vie_codec,
+                unsigned short width,
+                unsigned short height,
+                mozilla::Atomic<int32_t, mozilla::Relaxed>& aLastFramerateTenths)
 {
   // max bandwidth should be proportional (not linearly!) to resolution, and
   // proportional (perhaps linearly, or close) to current frame rate.
@@ -984,7 +1010,7 @@ SelectBandwidth(webrtc::VideoCodec& vie_codec,
   }

   // mLastFramerateTenths is an atomic, and scaled by *10
-  double framerate = std::min((mLastFramerateTenths/10.),60.0);
+  double framerate = std::min((aLastFramerateTenths/10.),60.0);
   MOZ_ASSERT(framerate > 0);
   // Now linear reduction/increase based on fps (max 60fps i.e. doubling)
   if (framerate >= 10) {
@@ -999,15 +1025,19 @@ SelectBandwidth(webrtc::VideoCodec& vie_codec,
 }

 // XXX we need to figure out how to feed back changes in preferred capture
-// resolution to the getUserMedia source
-// Invoked under lock of mCodecMutex!
+// resolution to the getUserMedia source.
+// Returns boolean if we've submitted an async change (and took ownership
+// of *frame's data)
 bool
 WebrtcVideoConduit::SelectSendResolution(unsigned short width,
-                                         unsigned short height)
+                                         unsigned short height,
+                                         webrtc::I420VideoFrame *frame) // may be null
 {
   mCodecMutex.AssertCurrentThreadOwns();
   // XXX This will do bandwidth-resolution adaptation as well - bug 877954

+  mLastWidth = width;
+  mLastHeight = height;
   // Limit resolution to max-fs while keeping same aspect ratio as the
   // incoming image.
   if (mCurSendCodecConfig && mCurSendCodecConfig->mMaxFrameSize)
@@ -1094,36 +1124,93 @@ WebrtcVideoConduit::SelectSendResolution(unsigned short width,
   }

   if (changed) {
-    // Get current vie codec.
-    webrtc::VideoCodec vie_codec;
-    int32_t err;
+    // On a resolution change, bounce this to the correct thread to
+    // re-configure (same as used for Init(). Do *not* block the calling
+    // thread since that may be the MSG thread.
+
+    // MUST run on the same thread as Init()/etc
+    if (!NS_IsMainThread()) {
+      // Note: on *initial* config (first frame), best would be to drop
+      // frames until the config is done, then encode the most recent frame
+      // provided and continue from there. We don't do this, but we do drop
+      // all frames while in the process of a reconfig and then encode the
+      // frame that started the reconfig, which is close. There may be
+      // barely perceptible glitch in the video due to the dropped frame(s).
+      mInReconfig = true;
+
+      // We can't pass a UniquePtr<> or unique_ptr<> to a lambda directly
+      webrtc::I420VideoFrame *new_frame = nullptr;
+      if (frame) {
+        new_frame = new webrtc::I420VideoFrame();
+        // the internal buffer pointer is refcounted, so we don't have 2 copies here
+        new_frame->ShallowCopy(*frame);
+      }
+      RefPtr<WebrtcVideoConduit> self(this);
+      RefPtr<nsRunnable> webrtc_runnable =
+        media::NewRunnableFrom([self, width, height, new_frame]() -> nsresult {
+            UniquePtr<webrtc::I420VideoFrame> local_frame(new_frame); // Simplify cleanup

-    if ((err = mPtrViECodec->GetSendCodec(mChannel, vie_codec)) != 0)
-    {
-      CSFLogError(logTag, "%s: GetSendCodec failed, err %d", __FUNCTION__, err);
-      return false;
-    }
-    // Likely spurious unless there was some error, but rarely checked
-    if (vie_codec.width != width || vie_codec.height != height ||
-        vie_codec.maxFramerate != mSendingFramerate)
-    {
-      vie_codec.width = width;
-      vie_codec.height = height;
-      vie_codec.maxFramerate = mSendingFramerate;
-      SelectBandwidth(vie_codec, width, height);
+            MutexAutoLock lock(self->mCodecMutex);
+            return self->ReconfigureSendCodec(width, height, new_frame);
+        });
+      // new_frame now owned by lambda
+      CSFLogDebug(logTag, "%s: proxying lambda to WebRTC thread for reconfig (width %u/%u, height %u/%u",
+                  __FUNCTION__, width, mLastWidth, height, mLastHeight);
+      NS_DispatchToMainThread(webrtc_runnable.forget());
+      if (new_frame) {
+        return true; // queued it
+      }
+    } else {
+      // already on the right thread
+      ReconfigureSendCodec(width, height, frame);
+    }
+  }
+  return false;
+}

-      if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
-      {
-        CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
-                    __FUNCTION__, width, height, err);
-        return false;
-      }
-      CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u @ %ufps, bitrate %u:%u",
-                  __FUNCTION__, width, height, mSendingFramerate,
-                  vie_codec.minBitrate, vie_codec.maxBitrate);
-    } // else no change; mSendingWidth likely was 0
-  }
-  return true;
+nsresult
+WebrtcVideoConduit::ReconfigureSendCodec(unsigned short width,
+                                         unsigned short height,
+                                         webrtc::I420VideoFrame *frame)
+{
+  mCodecMutex.AssertCurrentThreadOwns();
+
+  // Get current vie codec.
+  webrtc::VideoCodec vie_codec;
+  int32_t err;
+
+  mInReconfig = false;
+  if ((err = mPtrViECodec->GetSendCodec(mChannel, vie_codec)) != 0)
+  {
+    CSFLogError(logTag, "%s: GetSendCodec failed, err %d", __FUNCTION__, err);
+    return NS_ERROR_FAILURE;
+  }
+  // Likely spurious unless there was some error, but rarely checked
+  if (vie_codec.width != width || vie_codec.height != height ||
+      vie_codec.maxFramerate != mSendingFramerate)
+  {
+    vie_codec.width = width;
+    vie_codec.height = height;
+    vie_codec.maxFramerate = mSendingFramerate;
+    SelectBandwidth(vie_codec, width, height, mLastFramerateTenths);
+
+    if ((err = mPtrViECodec->SetSendCodec(mChannel, vie_codec)) != 0)
+    {
+      CSFLogError(logTag, "%s: SetSendCodec(%ux%u) failed, err %d",
+                  __FUNCTION__, width, height, err);
+      return NS_ERROR_FAILURE;
+    }
+    CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u @ %ufps, bitrate %u:%u",
+                __FUNCTION__, width, height, mSendingFramerate,
+                vie_codec.minBitrate, vie_codec.maxBitrate);
+  } // else no change; mSendingWidth likely was 0
+  if (frame) {
+    // XXX I really don't like doing this from MainThread...
+    mPtrExtCapture->IncomingFrame(*frame);
+    mVideoCodecStat->SentFrame();
+    CSFLogDebug(logTag, "%s Inserted a frame from reconfig lambda", __FUNCTION__);
+  }
+  return NS_OK;
 }

 // Invoked under lock of mCodecMutex!
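Aside: the reconfig path above works around two constraints at once: the codec must be reconfigured on the thread that created it, and a move-only UniquePtr cannot be captured by a C++11 lambda, so a raw pointer is handed into the capture list and re-wrapped inside the runnable. A stripped-down sketch of that hand-off, using std::function in place of Mozilla's NewRunnableFrom and a hypothetical Frame type:

    #include <functional>
    #include <memory>
    #include <vector>

    struct Frame { /* pixel data */ };

    // Stand-in for NS_DispatchToMainThread: queue a task for another thread.
    static std::vector<std::function<void()>> gMainThreadQueue;

    void ReconfigureAndMaybeSend(int w, int h, Frame* frame) { /* ... */ }

    void RequestReconfig(int w, int h, std::unique_ptr<Frame> frame) {
      // C++11 lambdas can't capture a move-only type, so release() the
      // raw pointer into the capture list...
      Frame* raw = frame.release();
      gMainThreadQueue.push_back([w, h, raw]() {
        // ...and immediately re-own it on the target thread so every
        // exit path deletes it exactly once.
        std::unique_ptr<Frame> owned(raw);
        ReconfigureAndMaybeSend(w, h, owned.get());
      });
    }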
@@ -1158,6 +1245,7 @@ WebrtcVideoConduit::SelectSendFrameRate(unsigned int framerate) const
 MediaConduitErrorCode
 WebrtcVideoConduit::SetExternalSendCodec(VideoCodecConfig* config,
                                          VideoEncoder* encoder) {
+  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   if (!mPtrExtCodec->RegisterExternalSendCodec(mChannel,
                                                config->mType,
                                                static_cast<WebrtcVideoEncoder*>(encoder),
@@ -1172,6 +1260,7 @@ WebrtcVideoConduit::SetExternalSendCodec(VideoCodecConfig* config,
 MediaConduitErrorCode
 WebrtcVideoConduit::SetExternalRecvCodec(VideoCodecConfig* config,
                                          VideoDecoder* decoder) {
+  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   if (!mPtrExtCodec->RegisterExternalReceiveCodec(mChannel,
                                                   config->mType,
                                                   static_cast<WebrtcVideoDecoder*>(decoder))) {
@@ -1190,25 +1279,17 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
                                    VideoType video_type,
                                    uint64_t capture_time)
 {
   CSFLogDebug(logTag, "%s ", __FUNCTION__);

   //check for the parameters sanity
   if(!video_frame || video_frame_length == 0 ||
      width == 0 || height == 0)
   {
     CSFLogError(logTag, "%s Invalid Parameters ",__FUNCTION__);
-    MOZ_ASSERT(PR_FALSE);
+    MOZ_ASSERT(false);
     return kMediaConduitMalformedArgument;
   }

-  // NOTE: update when common_types.h changes
-  if (video_type > kVideoBGRA) {
-    CSFLogError(logTag, "%s VideoType %d Invalid", __FUNCTION__, video_type);
-    MOZ_ASSERT(PR_FALSE);
-    return kMediaConduitMalformedArgument;
-  }
-  // RawVideoType == VideoType
-  webrtc::RawVideoType type = static_cast<webrtc::RawVideoType>((int)video_type);
+  MOZ_ASSERT(video_type == VideoType::kVideoI420);
+  MOZ_ASSERT(mPtrExtCapture);

   // Transmission should be enabled before we insert any frames.
   if(!mEngineTransmitting)
@@ -1217,25 +1298,36 @@ WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
     return kMediaConduitSessionNotInited;
   }

   // insert the frame to video engine in I420 format only
+  webrtc::I420VideoFrame i420_frame;
+  i420_frame.CreateFrame(video_frame, width, height, webrtc::kVideoRotation_0);
+  i420_frame.set_timestamp(capture_time);
+  i420_frame.set_render_time_ms(capture_time);
+
+  return SendVideoFrame(i420_frame);
+}
+
+MediaConduitErrorCode
+WebrtcVideoConduit::SendVideoFrame(webrtc::I420VideoFrame& frame)
+{
+  CSFLogDebug(logTag, "%s ", __FUNCTION__);
+  // See if we need to recalculate what we're sending.
+  // Don't compare mSendingWidth/Height, since those may not be the same as the input.
   {
     MutexAutoLock lock(mCodecMutex);
-    if (!SelectSendResolution(width, height))
-    {
-      return kMediaConduitCaptureError;
+    if (mInReconfig) {
+      // Waiting for it to finish
+      return kMediaConduitNoError;
+    }
+    if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
+      if (SelectSendResolution(frame.width(), frame.height(), &frame)) {
+        // SelectSendResolution took ownership of the data in i420_frame.
+        // Submit the frame after reconfig is done
+        return kMediaConduitNoError;
+      }
     }
   }
+  // insert the frame to video engine in I420 format only
   MOZ_ASSERT(mPtrExtCapture);
-  if(mPtrExtCapture->IncomingFrame(video_frame,
-                                   video_frame_length,
-                                   width, height,
-                                   type,
-                                   (unsigned long long)capture_time) == -1)
-  {
-    CSFLogError(logTag, "%s IncomingFrame Failed %d ", __FUNCTION__,
-                mPtrViEBase->LastError());
-    return kMediaConduitCaptureError;
-  }
+  mPtrExtCapture->IncomingFrame(frame);

   mVideoCodecStat->SentFrame();
   CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
@@ -1330,6 +1422,7 @@ WebrtcVideoConduit::StartTransmitting()
 MediaConduitErrorCode
 WebrtcVideoConduit::StopReceiving()
 {
+  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   // Are we receiving already? If so, stop receiving and playout
   // since we can't apply new recv codec when the engine is playing.
   if(mEngineReceiving)
@@ -1375,9 +1468,9 @@ WebrtcVideoConduit::StartReceiving()

 //WebRTC::RTP Callback Implementation
 // Called on MSG thread
-int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len)
+int WebrtcVideoConduit::SendPacket(int channel, const void* data, size_t len)
 {
-  CSFLogDebug(logTag, "%s : channel %d len %d", __FUNCTION__, channel, len);
+  CSFLogDebug(logTag, "%s : channel %d len %lu", __FUNCTION__, channel, (unsigned long) len);

   ReentrantMonitorAutoEnter enter(mTransportMonitor);
   if(mTransmitterTransport &&
@@ -1392,9 +1485,9 @@ int WebrtcVideoConduit::SendPacket(int channel, const void* data, size_t len)
 }

 // Called from multiple threads including webrtc Process thread
-int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len)
+int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, size_t len)
 {
-  CSFLogDebug(logTag, "%s : channel %d , len %d ", __FUNCTION__, channel,len);
+  CSFLogDebug(logTag, "%s : channel %d , len %lu ", __FUNCTION__, channel, (unsigned long) len);

   // We come here if we have only one pipeline/conduit setup,
   // such as for unidirectional streams.
@@ -1442,13 +1535,27 @@ WebrtcVideoConduit::FrameSizeChange(unsigned int width,

 int
 WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
-                                 int buffer_size,
+                                 size_t buffer_size,
                                  uint32_t time_stamp,
                                  int64_t ntp_time_ms,
                                  int64_t render_time,
                                  void *handle)
 {
-  CSFLogDebug(logTag, "%s Buffer Size %d", __FUNCTION__, buffer_size);
+  return DeliverFrame(buffer, buffer_size, mReceivingWidth, (mReceivingWidth+1)>>1,
+                      time_stamp, ntp_time_ms, render_time, handle);
+}
+
+int
+WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
+                                 size_t buffer_size,
+                                 uint32_t y_stride,
+                                 uint32_t cbcr_stride,
+                                 uint32_t time_stamp,
+                                 int64_t ntp_time_ms,
+                                 int64_t render_time,
+                                 void *handle)
+{
+  CSFLogDebug(logTag, "%s Buffer Size %lu", __FUNCTION__, (unsigned long) buffer_size);

   ReentrantMonitorAutoEnter enter(mTransportMonitor);
   if(mRenderer)
@@ -1474,8 +1581,61 @@ WebrtcVideoConduit::DeliverFrame(unsigned char* buffer,
     }

     const ImageHandle img_h(img);
-    mRenderer->RenderVideoFrame(buffer, buffer_size, time_stamp, render_time,
-                                img_h);
+    mRenderer->RenderVideoFrame(buffer, buffer_size, y_stride, cbcr_stride,
+                                time_stamp, render_time, img_h);
     return 0;
   }

   CSFLogError(logTag, "%s Renderer is NULL ", __FUNCTION__);
   return -1;
 }

+int
+WebrtcVideoConduit::DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame)
+{
+  if (!webrtc_frame.native_handle()) {
+    uint32_t y_stride = webrtc_frame.stride(static_cast<webrtc::PlaneType>(0));
+    return DeliverFrame(const_cast<uint8_t*>(webrtc_frame.buffer(webrtc::kYPlane)),
+                        CalcBufferSize(webrtc::kI420, y_stride, webrtc_frame.height()),
+                        y_stride,
+                        webrtc_frame.stride(static_cast<webrtc::PlaneType>(1)),
+                        webrtc_frame.timestamp(),
+                        webrtc_frame.ntp_time_ms(),
+                        webrtc_frame.render_time_ms(), nullptr);
+  }
+  size_t buffer_size = CalcBufferSize(webrtc::kI420, webrtc_frame.width(), webrtc_frame.height());
+  CSFLogDebug(logTag, "%s Buffer Size %lu", __FUNCTION__, (unsigned long) buffer_size);
+
+  ReentrantMonitorAutoEnter enter(mTransportMonitor);
+  if(mRenderer)
+  {
+    layers::Image* img = nullptr;
+    // |handle| should be a webrtc::NativeHandle if available.
+    webrtc::NativeHandle* native_h = static_cast<webrtc::NativeHandle*>(webrtc_frame.native_handle());
+    if (native_h) {
+      // In the handle, there should be a layers::Image.
+      img = static_cast<layers::Image*>(native_h->GetHandle());
+    }
+
+#if 0
+    //#ifndef MOZ_WEBRTC_OMX
+    // XXX - this may not be possible on GONK with textures!
+    if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
+      uint64_t now = PR_Now();
+      uint64_t timestamp = 0;
+      bool ok = YuvStamper::Decode(mReceivingWidth, mReceivingHeight, mReceivingWidth,
+                                   buffer,
+                                   reinterpret_cast<unsigned char*>(&timestamp),
+                                   sizeof(timestamp), 0, 0);
+      if (ok) {
+        VideoLatencyUpdate(now - timestamp);
+      }
+    }
+#endif
+
+    const ImageHandle img_h(img);
+    mRenderer->RenderVideoFrame(nullptr, buffer_size, webrtc_frame.timestamp(),
+                                webrtc_frame.render_time_ms(), img_h);
+    return 0;
+  }
@@ -8,12 +8,14 @@
 #include "nsAutoPtr.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/Atomics.h"
+#include "mozilla/SharedThreadPool.h"

 #include "MediaConduitInterface.h"
 #include "MediaEngineWrapper.h"
 #include "CodecStatistics.h"
 #include "LoadManagerFactory.h"
 #include "LoadManager.h"
+#include "runnable_utils.h"

 // conflicts with #include of scoped_ptr.h
 #undef FF
@@ -46,6 +48,7 @@
 namespace mozilla {

 class WebrtcAudioConduit;
+class nsThread;

 // Interface of external video encoder for WebRTC.
 class WebrtcVideoEncoder:public VideoEncoder
@@ -61,9 +64,9 @@ class WebrtcVideoDecoder:public VideoDecoder
  * Concrete class for Video session. Hooks up
  * - media-source and target to external transport
  */
-class WebrtcVideoConduit:public VideoSessionConduit
-                        ,public webrtc::Transport
-                        ,public webrtc::ExternalRenderer
+class WebrtcVideoConduit : public VideoSessionConduit
+                         , public webrtc::Transport
+                         , public webrtc::ExternalRenderer
 {
 public:
   //VoiceEngine defined constant for Payload Name Size.
@@ -135,17 +138,25 @@ public:

   virtual MediaConduitErrorCode SetReceiverTransport(RefPtr<TransportInterface> aTransport) override;

-  void SelectBandwidth(webrtc::VideoCodec& vie_codec,
-                       unsigned short width,
-                       unsigned short height);
   /**
    * Function to select and change the encoding resolution based on incoming frame size
    * and current available bandwidth.
    * @param width, height: dimensions of the frame
-   * @param force: force setting the codec config if framerate may require a bandwidth change
+   * @param frame: optional frame to submit for encoding after reconfig
    */
   bool SelectSendResolution(unsigned short width,
-                            unsigned short height);
+                            unsigned short height,
+                            webrtc::I420VideoFrame *frame);
+
+  /**
+   * Function to reconfigure the current send codec for a different
+   * width/height/framerate/etc.
+   * @param width, height: dimensions of the frame
+   * @param frame: optional frame to submit for encoding after reconfig
+   */
+  nsresult ReconfigureSendCodec(unsigned short width,
+                                unsigned short height,
+                                webrtc::I420VideoFrame *frame);

   /**
    * Function to select and change the encoding frame rate based on incoming frame rate
@@ -172,6 +183,7 @@ public:
                                                unsigned short height,
                                                VideoType video_type,
                                                uint64_t capture_time) override;
+  virtual MediaConduitErrorCode SendVideoFrame(webrtc::I420VideoFrame& frame) override;

   /**
    * Set an external encoder object |encoder| to the payload type |pltype|
@@ -192,13 +204,13 @@ public:
    * Webrtc transport implementation to send and receive RTP packet.
    * VideoConduit registers itself as ExternalTransport to the VideoEngine
    */
-  virtual int SendPacket(int channel, const void *data, int len) override;
+  virtual int SendPacket(int channel, const void *data, size_t len) override;

   /**
    * Webrtc transport implementation to send and receive RTCP packet.
    * VideoConduit registers itself as ExternalTransport to the VideoEngine
    */
-  virtual int SendRTCPPacket(int channel, const void *data, int len) override;
+  virtual int SendRTCPPacket(int channel, const void *data, size_t len) override;


  /**
@@ -207,9 +219,14 @@ public:
   */
  virtual int FrameSizeChange(unsigned int, unsigned int, unsigned int) override;

-  virtual int DeliverFrame(unsigned char*, int, uint32_t , int64_t,
+  virtual int DeliverFrame(unsigned char*, size_t, uint32_t , int64_t,
                            int64_t, void *handle) override;

+  virtual int DeliverFrame(unsigned char*, size_t, uint32_t, uint32_t, uint32_t , int64_t,
+                           int64_t, void *handle);
+
+  virtual int DeliverI420Frame(const webrtc::I420VideoFrame& webrtc_frame) override;
+
  /**
   * Does DeliverFrame() support a null buffer and non-null handle
   * (video texture)?
@@ -251,7 +268,9 @@ public:
   WebrtcVideoConduit();
   virtual ~WebrtcVideoConduit();

-  MediaConduitErrorCode Init();
+  MediaConduitErrorCode InitMain();
+  virtual MediaConduitErrorCode Init();
+  virtual void Destroy();

   int GetChannel() { return mChannel; }
   webrtc::VideoEngine* GetVideoEngine() { return mVideoEngine; }
@@ -285,9 +304,24 @@ public:
   uint64_t MozVideoLatencyAvg();

 private:
-  DISALLOW_COPY_AND_ASSIGN(WebrtcVideoConduit);
+  WebrtcVideoConduit(const WebrtcVideoConduit& other) = delete;
+  void operator=(const WebrtcVideoConduit& other) = delete;

+  static inline bool OnThread(nsIEventTarget *thread)
+  {
+    bool on;
+    nsresult rv;
+    rv = thread->IsOnCurrentThread(&on);
+
+    // If the target thread has already shut down, we don't want to assert.
+    if (rv != NS_ERROR_NOT_INITIALIZED) {
+      MOZ_ASSERT(NS_SUCCEEDED(rv));
+    }
+
+    if (NS_WARN_IF(NS_FAILED(rv))) {
+      return false;
+    }
+    return on;
+  }

   //Local database of currently applied receive codecs
   typedef std::vector<VideoCodecConfig* > RecvCodecList;
@@ -340,7 +374,10 @@ private:

   Mutex mCodecMutex; // protects mCurrSendCodecConfig
   nsAutoPtr<VideoCodecConfig> mCurSendCodecConfig;
+  bool mInReconfig;

+  unsigned short mLastWidth;
+  unsigned short mLastHeight;
   unsigned short mSendingWidth;
   unsigned short mSendingHeight;
   unsigned short mReceivingWidth;
@@ -372,7 +409,6 @@ private:
   nsAutoPtr<LoadManager> mLoadManager;
   webrtc::VideoCodecMode mCodecMode;
 };

 } // end namespace

 #endif
@@ -486,7 +486,9 @@ int32_t
 WebrtcGmpVideoEncoder::SetRates(uint32_t aNewBitRate, uint32_t aFrameRate)
 {
   MOZ_ASSERT(mGMPThread);
   MOZ_ASSERT(!NS_IsMainThread());
+  if (aFrameRate == 0) {
+    aFrameRate = 30; // Assume 30fps if we don't know the rate
+  }
   mGMPThread->Dispatch(WrapRunnableNM(&WebrtcGmpVideoEncoder::SetRates_g,
                                       RefPtr<WebrtcGmpVideoEncoder>(this),
                                       aNewBitRate,
@@ -926,11 +928,8 @@ WebrtcGmpVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame)
   MutexAutoLock lock(mCallbackMutex);
   if (mCallback) {
     webrtc::I420VideoFrame image;
-    int ret = image.CreateFrame(aDecodedFrame->AllocatedSize(kGMPYPlane),
-                                aDecodedFrame->Buffer(kGMPYPlane),
-                                aDecodedFrame->AllocatedSize(kGMPUPlane),
+    int ret = image.CreateFrame(aDecodedFrame->Buffer(kGMPYPlane),
                                 aDecodedFrame->Buffer(kGMPUPlane),
-                                aDecodedFrame->AllocatedSize(kGMPVPlane),
                                 aDecodedFrame->Buffer(kGMPVPlane),
                                 aDecodedFrame->Width(),
                                 aDecodedFrame->Height(),
@@ -307,7 +307,7 @@ class WebrtcVideoEncoderProxy : public WebrtcVideoEncoder

   int32_t InitEncode(const webrtc::VideoCodec* aCodecSettings,
                      int32_t aNumberOfCores,
-                     uint32_t aMaxPayloadSize) override
+                     size_t aMaxPayloadSize) override
   {
     return mEncoderImpl->InitEncode(aCodecSettings,
                                     aNumberOfCores,
@@ -336,7 +336,7 @@ class WebrtcVideoEncoderProxy : public WebrtcVideoEncoder
   }

   int32_t SetChannelParameters(uint32_t aPacketLoss,
-                               int aRTT) override
+                               int64_t aRTT) override
   {
     return mEncoderImpl->SetChannelParameters(aPacketLoss, aRTT);
   }
@@ -839,7 +839,7 @@ WebrtcMediaCodecVP8VideoEncoder::~WebrtcMediaCodecVP8VideoEncoder() {
   Release();
 }

-int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(uint32_t packetLoss, int rtt) {
+int32_t WebrtcMediaCodecVP8VideoEncoder::SetChannelParameters(uint32_t packetLoss, int64_t rtt) {
   CSFLogDebug(logTag, "%s ", __FUNCTION__);
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -45,7 +45,7 @@ public:
  virtual int32_t Release() override;

  virtual int32_t SetChannelParameters(uint32_t packetLoss, int rtt) override;
  virtual int32_t SetChannelParameters(uint32_t packetLoss, int64_t rtt) override;

  virtual int32_t SetRates(uint32_t newBitRate, uint32_t frameRate) override;

@@ -21,7 +21,7 @@
using namespace android;

// WebRTC
#include "webrtc/common_video/interface/texture_video_frame.h"
//#include "webrtc/common_video/interface/texture_video_frame.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "runnable_utils.h"

@@ -172,9 +172,12 @@ public:
  NS_IMETHODIMP Run() override
  {
    MonitorAutoLock lock(mMonitor);
    if (mEnding) {
      return NS_OK;
    }
    MOZ_ASSERT(mThread);

    MonitorAutoLock lock(mMonitor);
    while (true) {
      if (mInputFrames.empty()) {
        // Wait for new input.

@@ -546,12 +549,12 @@ public:
    CODEC_LOGD("Decoder NewFrame: %dx%d, timestamp %lld, renderTimeMs %lld",
               picSize.width, picSize.height, timestamp, renderTimeMs);

    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(
      new webrtc::TextureVideoFrame(new ImageNativeHandle(grallocImage.forget()),
                                    picSize.width,
                                    picSize.height,
                                    timestamp,
                                    renderTimeMs));
    nsAutoPtr<webrtc::I420VideoFrame> videoFrame(new webrtc::I420VideoFrame(
      new ImageNativeHandle(grallocImage.forget()),
      grallocData.mPicSize.width,
      grallocData.mPicSize.height,
      timestamp,
      renderTimeMs));
    if (videoFrame != nullptr) {
      mCallback->Decoded(*videoFrame);
    }

@@ -1074,9 +1077,9 @@ WebrtcOMXH264VideoEncoder::~WebrtcOMXH264VideoEncoder()
// Note: stagefright doesn't handle these parameters.
int32_t
WebrtcOMXH264VideoEncoder::SetChannelParameters(uint32_t aPacketLossRate,
                                                int aRoundTripTimeMs)
                                                int64_t aRoundTripTimeMs)
{
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set channel packet loss:%u, rtt:%d",
  CODEC_LOGD("WebrtcOMXH264VideoEncoder:%p set channel packet loss:%u, rtt:%" PRIi64,
             this, aPacketLossRate, aRoundTripTimeMs);

  return WEBRTC_VIDEO_CODEC_OK;

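Note on the hunk above: widening the RTT argument to int64_t also forces the log format to switch to the PRIi64 macro from <cinttypes>, which expands to the right length modifier on every platform. A minimal standalone sketch of the pattern (the rtt value is hypothetical, not from the patch):

  #include <cinttypes>  // PRIi64; also pulls in <cstdint> for int64_t
  #include <cstdio>

  int main() {
    int64_t rtt_ms = 120;  // hypothetical round-trip time
    // "%" PRIi64 becomes e.g. "%lld" or "%ld" depending on the ABI,
    // so the same source line is correct on 32- and 64-bit targets.
    printf("rtt:%" PRIi64 "\n", rtt_ms);
    return 0;
  }
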
@@ -51,7 +51,7 @@ public:
  virtual int32_t Release() override;

  virtual int32_t SetChannelParameters(uint32_t aPacketLossRate,
                                       int aRoundTripTimeMs) override;
                                       int64_t aRoundTripTimeMs) override;

  virtual int32_t SetRates(uint32_t aBitRate, uint32_t aFrameRate) override;

@@ -47,6 +47,11 @@
#include "mozilla/UniquePtr.h"
#include "mozilla/UniquePtrExtensions.h"

#include "webrtc/common_types.h"
#include "webrtc/common_video/interface/native_handle.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/video_engine/include/vie_errors.h"

#include "logging.h"

// Should come from MediaEngineWebRTC.h, but that's a pain to include here

@@ -1101,11 +1106,39 @@ void MediaPipelineTransmit::PipelineListener::ProcessVideoChunk(
  uint32_t width = graphicBuffer->getWidth();
  uint32_t height = graphicBuffer->getHeight();
  // XXX gralloc buffer's width and stride could be different depends on implementations.
  conduit->SendVideoFrame(static_cast<unsigned char*>(basePtr),
                          I420SIZE(width, height),
                          width,
                          height,
                          destFormat, 0);

  if (destFormat != mozilla::kVideoI420) {
    unsigned char *video_frame = static_cast<unsigned char*>(basePtr);
    webrtc::I420VideoFrame i420_frame;
    int stride_y = width;
    int stride_uv = (width + 1) / 2;
    int target_width = width;
    int target_height = height;
    if (i420_frame.CreateEmptyFrame(target_width,
                                    abs(target_height),
                                    stride_y,
                                    stride_uv, stride_uv) < 0) {
      MOZ_ASSERT(false, "Can't allocate empty i420frame");
      return;
    }
    webrtc::VideoType commonVideoType =
      webrtc::RawVideoTypeToCommonVideoVideoType(
        static_cast<webrtc::RawVideoType>((int)destFormat));
    if (ConvertToI420(commonVideoType, video_frame, 0, 0, width, height,
                      I420SIZE(width, height), webrtc::kVideoRotation_0,
                      &i420_frame)) {
      MOZ_ASSERT(false, "Can't convert video type for sending to I420");
      return;
    }
    i420_frame.set_ntp_time_ms(0);
    conduit->SendVideoFrame(i420_frame);
  } else {
    conduit->SendVideoFrame(static_cast<unsigned char*>(basePtr),
                            I420SIZE(width, height),
                            width,
                            height,
                            destFormat, 0);
  }
  graphicBuffer->unlock();
  return;
} else

@@ -1464,7 +1497,19 @@ MediaPipelineReceiveVideo::PipelineListener::PipelineListener(

void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
    const unsigned char* buffer,
    unsigned int buffer_size,
    size_t buffer_size,
    uint32_t time_stamp,
    int64_t render_time,
    const RefPtr<Image>& video_image) {
  RenderVideoFrame(buffer, buffer_size, width_, (width_ + 1) >> 1,
                   time_stamp, render_time, video_image);
}

void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
    const unsigned char* buffer,
    size_t buffer_size,
    uint32_t y_stride,
    uint32_t cbcr_stride,
    uint32_t time_stamp,
    int64_t render_time,
    const RefPtr<Image>& video_image) {

@@ -1489,12 +1534,12 @@ void MediaPipelineReceiveVideo::PipelineListener::RenderVideoFrame(
  PlanarYCbCrData yuvData;
  yuvData.mYChannel = frame;
  yuvData.mYSize = IntSize(width_, height_);
  yuvData.mYStride = width_;
  yuvData.mCbCrStride = (width_ + 1) >> 1;
  yuvData.mYSize = IntSize(y_stride, height_);
  yuvData.mYStride = y_stride;
  yuvData.mCbCrStride = cbcr_stride;
  yuvData.mCbChannel = frame + height_ * yuvData.mYStride;
  yuvData.mCrChannel = yuvData.mCbChannel + ((height_ + 1) >> 1) * yuvData.mCbCrStride;
  yuvData.mCbCrSize = IntSize((width_ + 1) >> 1, (height_ + 1) >> 1);
  yuvData.mCbCrSize = IntSize(yuvData.mCbCrStride, (height_ + 1) >> 1);
  yuvData.mPicX = 0;
  yuvData.mPicY = 0;
  yuvData.mPicSize = IntSize(width_, height_);

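Note on the hunk above: the stride-aware RenderVideoFrame overload relies on the standard I420 layout, where the Cb and Cr planes follow the Y plane and are subsampled by two in each dimension. A minimal sketch of the same offset arithmetic, assuming a single packed buffer with the given strides (names are illustrative):

  #include <cstddef>
  #include <cstdint>

  // Offsets of the three I420 planes inside one packed frame buffer,
  // mirroring the mCbChannel/mCrChannel math above.
  struct I420Layout {
    size_t y_offset;   // Y plane starts at 0
    size_t cb_offset;  // after 'height' full-stride Y rows
    size_t cr_offset;  // after ((height + 1) / 2) chroma rows
    size_t total_size;
  };

  I420Layout ComputeI420Layout(uint32_t y_stride, uint32_t cbcr_stride,
                               uint32_t height) {
    const size_t chroma_rows = (height + 1) / 2;
    I420Layout l;
    l.y_offset = 0;
    l.cb_offset = static_cast<size_t>(y_stride) * height;
    l.cr_offset = l.cb_offset + static_cast<size_t>(cbcr_stride) * chroma_rows;
    l.total_size = l.cr_offset + static_cast<size_t>(cbcr_stride) * chroma_rows;
    return l;
  }
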
@@ -701,17 +701,30 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
  // Implement VideoRenderer
  virtual void FrameSizeChange(unsigned int width,
                               unsigned int height,
                               unsigned int number_of_streams) {
                               unsigned int number_of_streams) override {
    pipeline_->listener_->FrameSizeChange(width, height, number_of_streams);
  }

  virtual void RenderVideoFrame(const unsigned char* buffer,
                                unsigned int buffer_size,
                                size_t buffer_size,
                                uint32_t time_stamp,
                                int64_t render_time,
                                const ImageHandle& handle) {
    pipeline_->listener_->RenderVideoFrame(buffer, buffer_size, time_stamp,
                                           render_time,
                                const ImageHandle& handle) override {
    pipeline_->listener_->RenderVideoFrame(buffer, buffer_size,
                                           time_stamp, render_time,
                                           handle.GetImage());
  }

  virtual void RenderVideoFrame(const unsigned char* buffer,
                                size_t buffer_size,
                                uint32_t y_stride,
                                uint32_t cbcr_stride,
                                uint32_t time_stamp,
                                int64_t render_time,
                                const ImageHandle& handle) override {
    pipeline_->listener_->RenderVideoFrame(buffer, buffer_size,
                                           y_stride, cbcr_stride,
                                           time_stamp, render_time,
                                           handle.GetImage());
  }

@@ -745,7 +758,14 @@ class MediaPipelineReceiveVideo : public MediaPipelineReceive {
  }

  void RenderVideoFrame(const unsigned char* buffer,
                        unsigned int buffer_size,
                        size_t buffer_size,
                        uint32_t time_stamp,
                        int64_t render_time,
                        const RefPtr<layers::Image>& video_image);
  void RenderVideoFrame(const unsigned char* buffer,
                        size_t buffer_size,
                        uint32_t y_stride,
                        uint32_t cbcr_stride,
                        uint32_t time_stamp,
                        int64_t render_time,
                        const RefPtr<layers::Image>& video_image);

@@ -378,10 +378,21 @@ public:

  void RenderVideoFrame(const unsigned char* buffer,
                        unsigned int buffer_size,
                        size_t buffer_size,
                        uint32_t y_stride,
                        uint32_t cbcr_stride,
                        uint32_t time_stamp,
                        int64_t render_time,
                        const mozilla::ImageHandle& handle)
                        const mozilla::ImageHandle& handle) override
  {
    RenderVideoFrame(buffer, buffer_size, time_stamp, render_time, handle);
  }

  void RenderVideoFrame(const unsigned char* buffer,
                        size_t buffer_size,
                        uint32_t time_stamp,
                        int64_t render_time,
                        const mozilla::ImageHandle& handle) override
  {
    //write the frame to the file
    if(VerifyFrame(buffer, buffer_size) == 0)

@@ -393,7 +404,7 @@ public:
    }
  }

  void FrameSizeChange(unsigned int, unsigned int, unsigned int)
  void FrameSizeChange(unsigned int, unsigned int, unsigned int) override
  {
    //do nothing
  }

@@ -396,10 +396,15 @@ class MediaPipelineTest : public ::testing::Test {
    // Setup transport flows
    InitTransports(aIsRtcpMux);

#if 0 //DEBUG(pkerr)
    mozilla::SyncRunnable::DispatchToThread(
        test_utils->sts_target(),
        WrapRunnable(&p1_, &TestAgent::CreatePipelines_s, aIsRtcpMux));
#else
    NS_DispatchToMainThread(
        WrapRunnable(&p1_, &TestAgent::CreatePipelines_s, aIsRtcpMux),
        NS_DISPATCH_SYNC);
#endif
    mozilla::SyncRunnable::DispatchToThread(
        test_utils->sts_target(),
        WrapRunnable(&p2_, &TestAgent::CreatePipelines_s, aIsRtcpMux));

@@ -0,0 +1,6 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

#dummy gypi: contents should be discarded due to an enclosing 'conditions:' element.
{}

@@ -36,7 +36,7 @@
      'webrtc/modules/modules.gyp:audio_device',
      'webrtc/modules/modules.gyp:video_capture_module',
      'webrtc/modules/modules.gyp:video_capture_module_internal_impl',
      'webrtc/modules/modules.gyp:video_render_module_impl',
      'webrtc/modules/modules.gyp:video_render',
#      'webrtc/system_wrappers/source/system_wrappers.gyp:system_wrappers',
#      'webrtc/system_wrappers/source/system_wrappers.gyp:metrics_default',
      'webrtc/video_engine/video_engine.gyp:video_engine_core',

@@ -58,6 +58,20 @@
        'timeutils.h',
        'trace_event.h',
      ],
      'conditions': [
        ['OS=="mac"', {
          'sources': [
            'macutils.cc',
            'macutils.h',
          ],
        }],
        ['OS=="win"', {
          'sources': [
            'win32.cc',
            'win32.h',
          ],
        }],
      ],
    },
    {
      'target_name': 'rtc_base',

@@ -99,14 +113,14 @@
        'basicdefs.h',
        'basictypes.h',
        'bind.h',
        'bind.h.pump',
#        'bind.h.pump',
        'buffer.cc',
        'buffer.h',
        'bytebuffer.cc',
        'bytebuffer.h',
        'byteorder.h',
        'callback.h',
        'callback.h.pump',
#        'callback.h.pump',
        'constructormagic.h',
        'common.cc',
        'common.h',

@@ -156,9 +170,9 @@
        'json.cc',
        'json.h',
        'latebindingsymboltable.cc',
        'latebindingsymboltable.cc.def',
#        'latebindingsymboltable.cc.def',
        'latebindingsymboltable.h',
        'latebindingsymboltable.h.def',
#        'latebindingsymboltable.h.def',
        'libdbusglibsymboltable.cc',
        'libdbusglibsymboltable.h',
        'linux.cc',

@@ -178,8 +192,9 @@
        'macconversion.h',
        'macsocketserver.cc',
        'macsocketserver.h',
        'macutils.cc',
        'macutils.h',
# moved by mozilla
#        'macutils.cc',
#        'macutils.h',
        'macwindowpicker.cc',
        'macwindowpicker.h',
        'mathutils.h',

@@ -297,8 +312,8 @@
        'versionparsing.h',
        'virtualsocketserver.cc',
        'virtualsocketserver.h',
        'win32.cc',
        'win32.h',
#        'win32.cc',
#        'win32.h',
        'win32filesystem.cc',
        'win32filesystem.h',
        'win32regkey.cc',

@@ -365,9 +380,9 @@
        'bandwidthsmoother.h',
        'basictypes.h',
        'bind.h',
        'bind.h.pump',
#        'bind.h.pump',
        'callback.h',
        'callback.h.pump',
#        'callback.h.pump',
        'constructormagic.h',
        'dbus.cc',
        'dbus.h',

@@ -377,15 +392,15 @@
        'filelock.h',
        'fileutils_mock.h',
        'genericslot.h',
        'genericslot.h.pump',
#        'genericslot.h.pump',
        'httpserver.cc',
        'httpserver.h',
        'json.cc',
        'json.h',
        'latebindingsymboltable.cc',
        'latebindingsymboltable.cc.def',
#        'latebindingsymboltable.cc.def',
        'latebindingsymboltable.h',
        'latebindingsymboltable.h.def',
#        'latebindingsymboltable.h.def',
        'libdbusglibsymboltable.cc',
        'libdbusglibsymboltable.h',
        'linuxfdwalk.c',

@@ -79,7 +79,8 @@ bool Base64::GetNextBase64Char(char ch, char* next_ch) {
  if (next_ch == NULL) {
    return false;
  }
  const char* p = strchr(Base64Table, ch);
  // Evil due to base/stringutils.h wanting non-standard &char for the second arg
  const char* p = strchr(Base64Table, &ch);
  if (!p)
    return false;
  ++p;

@@ -16,7 +16,7 @@
#include <cstdio>
#include <cstdlib>

#if defined(__GLIBCXX__) && !defined(__UCLIBC__)
#if defined(__GLIBC__) && !defined(__UCLIBC__)
#include <cxxabi.h>
#include <execinfo.h>
#endif

@@ -55,7 +55,7 @@ void PrintError(const char* format, ...) {
// to get usable symbols on Linux. This is copied from V8. Chromium has a more
// advanced stace trace system; also more difficult to copy.
void DumpBacktrace() {
#if defined(__GLIBCXX__) && !defined(__UCLIBC__)
#if defined(__GLIBC__) && !defined(__UCLIBC__)
  void* trace[100];
  int size = backtrace(trace, sizeof(trace) / sizeof(*trace));
  char** symbols = backtrace_symbols(trace, size);

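Note on the hunk above: backtrace() and backtrace_symbols() are glibc extensions, which is why the guard was changed from __GLIBCXX__ (a libstdc++ macro) to __GLIBC__ (the C library macro). A minimal standalone sketch of the same pattern:

  #if defined(__GLIBC__) && !defined(__UCLIBC__)
  #include <execinfo.h>
  #include <cstdio>
  #include <cstdlib>

  void DumpBacktraceSketch() {
    void* trace[100];
    int size = backtrace(trace, sizeof(trace) / sizeof(*trace));
    char** symbols = backtrace_symbols(trace, size);  // malloc'd array
    for (int i = 0; i < size; ++i)
      fprintf(stderr, "  #%d %s\n", i, symbols ? symbols[i] : "?");
    free(symbols);
  }
  #endif
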
@@ -10,7 +10,9 @@

#include <sstream>

#ifndef WEBRTC_MOZILLA_BUILD
#include "webrtc/base/common.h"
#endif
#include "webrtc/base/logging.h"
#include "webrtc/base/macutils.h"
#include "webrtc/base/scoped_ptr.h"

@@ -70,7 +72,9 @@ void DecodeFourChar(UInt32 fc, std::string* out) {
}

static bool GetGestalt(OSType ostype, int* value) {
#ifndef WEBRTC_MOZILLA_BUILD
  ASSERT(NULL != value);
#endif
  SInt32 native_value;
  OSStatus result = Gestalt(ostype, &native_value);
  if (noErr == result) {

@@ -79,12 +83,16 @@ static bool GetGestalt(OSType ostype, int* value) {
  }
  std::string str;
  DecodeFourChar(ostype, &str);
#ifndef WEBRTC_MOZILLA_BUILD
  LOG_E(LS_ERROR, OS, result) << "Gestalt(" << str << ")";
#endif
  return false;
}

bool GetOSVersion(int* major, int* minor, int* bugfix) {
#ifndef WEBRTC_MOZILLA_BUILD
  ASSERT(major && minor && bugfix);
#endif
  if (!GetGestalt(gestaltSystemVersion, major)) {
    return false;
  }

@@ -141,6 +149,7 @@ bool GetQuickTimeVersion(std::string* out) {
  return true;
}

#ifndef WEBRTC_MOZILLA_BUILD
bool RunAppleScript(const std::string& script) {
  // TODO(thaloun): Add a .mm file that contains something like this:
  // NSString source from script

@@ -214,6 +223,8 @@ bool RunAppleScript(const std::string& script) {
  return false;
#endif  // CARBON_DEPRECATED
}
#endif  // !WEBRTC_MOZILLA

#endif  // WEBRTC_MAC && !defined(WEBRTC_IOS)

///////////////////////////////////////////////////////////////////////////////

@@ -47,10 +47,12 @@ bool GetOSVersion(int* major, int* minor, int* bugfix);
MacOSVersionName GetOSVersionName();
bool GetQuickTimeVersion(std::string* version);

#ifndef WEBRTC_MOZILLA_BUILD
// Runs the given apple script. Only supports scripts that does not
// require user interaction.
bool RunAppleScript(const std::string& script);
#endif
#endif

///////////////////////////////////////////////////////////////////////////////

@@ -109,6 +109,19 @@
#include "webrtc/base/template_util.h"
#include "webrtc/typedefs.h"

// XXX This file creates unused typedefs as a way of doing static assertions,
// both via COMPILE_ASSERT and via direct typedefs like
// 'type_must_be_complete'. These trigger a GCC warning (enabled by -Wall in
// GCC 4.8 and above) which we disable here, just for this file, for GCC > 4.8.
// This can be removed if & when this file (and COMPILE_ASSERT) stops using
// these typedefs.
#if defined(__GNUC__)
#if !defined(__clang__) && ((__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8))
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-local-typedefs"
#endif  // not clang, and version >= 4.8
#endif  // GCC or clang

namespace rtc {

// Function object which deletes its parameter, which must be a pointer.

@@ -623,4 +636,11 @@ rtc::scoped_ptr<T> rtc_make_scoped_ptr(T* ptr) {
  return rtc::scoped_ptr<T>(ptr);
}

// Pop off 'ignored "-Wunused-local-typedefs"':
#if defined(__GNUC__)
#if !defined(__clang__) && ((__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8))
#pragma GCC diagnostic pop
#endif  // not clang, and version >= 4.8
#endif  // GCC or clang

#endif  // #ifndef WEBRTC_BASE_SCOPED_PTR_H__

@@ -86,6 +86,8 @@

#ifndef WEBRTC_BASE_SIGSLOT_H__
#define WEBRTC_BASE_SIGSLOT_H__
#ifndef TALK_BASE_SIGSLOT_H__
#define TALK_BASE_SIGSLOT_H__

#include <list>
#include <set>

@@ -2801,4 +2803,5 @@ namespace sigslot {

};  // namespace sigslot

#endif  // TALK_BASE_SIGSLOT_H__
#endif  // WEBRTC_BASE_SIGSLOT_H__

@@ -23,7 +23,7 @@
#endif  // WEBRTC_WIN

#if defined(WEBRTC_POSIX)
#ifdef BSD
#ifdef WEBRTC_BSD
#include <stdlib.h>
#else  // BSD
#include <alloca.h>

@@ -27,10 +27,24 @@ PlatformThreadId CurrentThreadId() {
#elif defined(WEBRTC_POSIX)
#if defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
  ret = pthread_mach_thread_np(pthread_self());
#elif defined(WEBRTC_LINUX)
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_GONK)
  ret = syscall(__NR_gettid);
#elif defined(WEBRTC_ANDROID)
  ret = gettid();
#elif defined(__NetBSD__)
  return _lwp_self();
#elif defined(__DragonFly__)
  return lwp_gettid();
#elif defined(__OpenBSD__)
  return reinterpret_cast<uintptr_t> (pthread_self());
#elif defined(__FreeBSD__)
#if __FreeBSD_version > 900030
  return pthread_getthreadid_np();
#else
  long lwpid;
  thr_self(&lwpid);
  return lwpid;
#endif
#else
  // Default implementation for nacl and solaris.
  ret = reinterpret_cast<pid_t>(pthread_self());

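Note on the hunk above: on Linux and Gonk the thread id is fetched with a raw syscall because older glibc/bionic did not expose a gettid() wrapper. A standalone sketch, assuming a Linux host:

  #include <sys/syscall.h>  // SYS_gettid (same number as __NR_gettid)
  #include <unistd.h>       // syscall()
  #include <cstdio>

  int main() {
    // Kernel thread id; distinct from pthread_self(), which is only a
    // library handle, not a kernel identifier.
    pid_t tid = static_cast<pid_t>(syscall(SYS_gettid));
    printf("tid=%d\n", tid);
    return 0;
  }
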
@@ -23,12 +23,30 @@
  'cflags!': [
    '-mfpu=vfpv3-d16',
  ],
  'cflags_mozilla!': [
    '-mfpu=vfpv3-d16',
  ],
  'asflags!': [
    '-mfpu=vfpv3-d16',
  ],
  'asflags_mozilla!': [
    '-mfpu=vfpv3-d16',
  ],
  'conditions': [
    # "-mfpu=neon" is not requried for arm64 in GCC.
    ['target_arch!="arm64"', {
      'cflags': [
        '-mfpu=neon',
      ],
      'cflags_mozilla': [
        '-mfpu=neon',
      ],
      'asflags': [
        '-mfpu=neon',
      ],
      'asflags_mozilla': [
        '-mfpu=neon',
      ],
    }],
  ],
}

@@ -40,7 +40,13 @@
      'modules_java_gyp_path%': '<(modules_java_gyp_path)',
      'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
      'webrtc_vp9_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp9',
      'webrtc_h264_dir%': '<(webrtc_root)/modules/video_coding/codecs/h264',
      'include_g711%': 1,
      'include_g722%': 1,
      'include_ilbc%': 0,
      'include_opus%': 1,
      'include_isac%': 1,
      'include_pcm16b%': 1,
      'opus_dir%': '<(DEPTH)/third_party/opus',
    },
    'build_with_chromium%': '<(build_with_chromium)',

@@ -50,7 +56,15 @@
    'modules_java_gyp_path%': '<(modules_java_gyp_path)',
    'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
    'webrtc_vp9_dir%': '<(webrtc_vp9_dir)',
    'webrtc_h264_dir%': '<(webrtc_h264_dir)',

    'include_g711%': '<(include_g711)',
    'include_g722%': '<(include_g722)',
    'include_ilbc%': '<(include_ilbc)',
    'include_opus%': '<(include_opus)',
    'include_isac%': '<(include_isac)',
    'include_pcm16b%': '<(include_pcm16b)',

    'rtc_relative_path%': 1,
    'external_libraries%': '0',
    'json_root%': '<(DEPTH)/third_party/jsoncpp/source/include/',

@@ -128,6 +142,7 @@
      # Exclude internal ADM since Chromium uses its own IO handling.
      'include_internal_audio_device%': 0,

      'include_ndk_cpu_features%': 0,
    }, {  # Settings for the standalone (not-in-Chromium) build.
      # TODO(andrew): For now, disable the Chrome plugins, which causes a
      # flood of chromium-style warnings. Investigate enabling them:

@@ -136,6 +151,7 @@

      'include_pulse_audio%': 1,
      'include_internal_audio_device%': 1,
      'include_ndk_cpu_features%': 0,
    }],
    ['build_with_libjingle==1', {
      'include_tests%': 0,

@@ -144,6 +160,26 @@
      'include_tests%': 1,
      'restrict_webrtc_logging%': 0,
    }],
    ['OS=="linux"', {
      'include_alsa_audio%': 1,
    }, {
      'include_alsa_audio%': 0,
    }],
    ['OS=="openbsd"', {
      'include_sndio_audio%': 1,
    }, {
      'include_sndio_audio%': 0,
    }],
    ['OS=="solaris" or (OS!="openbsd" and os_bsd==1)', {
      'include_pulse_audio%': 1,
    }, {
      'include_pulse_audio%': 0,
    }],
    ['OS=="linux" or OS=="solaris" or os_bsd==1', {
      'include_v4l2_video_capture%': 1,
    }, {
      'include_v4l2_video_capture%': 0,
    }],
    ['OS=="ios"', {
      'build_libjpeg%': 0,
      'enable_protobuf%': 0,

@@ -151,7 +187,7 @@
    ['target_arch=="arm" or target_arch=="arm64"', {
      'prefer_fixed_point%': 1,
    }],
    ['OS!="ios" and (target_arch!="arm" or arm_version>=7) and target_arch!="mips64el"', {
    ['OS!="ios" and (target_arch!="arm" or arm_version>=7) and target_arch!="mips64el" and build_with_mozilla==0', {
      'rtc_use_openmax_dl%': 1,
    }, {
      'rtc_use_openmax_dl%': 0,

@@ -160,6 +196,11 @@
  },
  'target_defaults': {
    'conditions': [
      ['moz_widget_toolkit_gonk==1', {
        'defines' : [
          'WEBRTC_GONK',
        ],
      }],
      ['restrict_webrtc_logging==1', {
        'defines': ['WEBRTC_RESTRICT_LOGGING',],
      }],

@@ -273,18 +314,37 @@
        ],
        'conditions': [
          ['arm_version>=7', {
            'defines': ['WEBRTC_ARCH_ARM_V7',],
            'defines': ['WEBRTC_ARCH_ARM_V7',
                        'WEBRTC_BUILD_NEON_LIBS'],
            'conditions': [
              ['arm_neon==1', {
                'defines': ['WEBRTC_ARCH_ARM_NEON',],
              }],
              ['arm_neon==0 and OS=="android"', {
              ['arm_neon==0 and (OS=="android" or moz_widget_toolkit_gonk==1)', {
                'defines': ['WEBRTC_DETECT_ARM_NEON',],
              }],
            ],
          }],
        ],
      }],
      ['os_bsd==1', {
        'defines': [
          'WEBRTC_BSD',
          'WEBRTC_THREAD_RR',
        ],
      }],
      ['OS=="dragonfly" or OS=="netbsd"', {
        'defines': [
          # doesn't support pthread_condattr_setclock
          'WEBRTC_CLOCK_TYPE_REALTIME',
        ],
      }],
      ['OS=="openbsd"', {
        'defines' : [
          'WEBRTC_AUDIO_SNDIO',
        ],
      }],
      # Mozilla: if we support Mozilla on MIPS, we'll need to mod the cflags entries here
      ['target_arch=="mipsel" and mips_arch_variant!="r6" and android_webview_build==0', {
        'defines': [
          'MIPS32_LE',

@@ -332,6 +392,13 @@
        ],
      }],
      ['OS=="linux"', {
#        'conditions': [
#          ['have_clock_monotonic==1', {
#            'defines': [
#              'WEBRTC_CLOCK_TYPE_REALTIME',
#            ],
#          }],
#        ],
        'defines': [
          'WEBRTC_LINUX',
        ],

@@ -355,17 +422,23 @@
        # Re-enable some warnings that Chromium disables.
        'msvs_disabled_warnings!': [4189,],
      }],
      # used on GONK as well
      ['enable_android_opensl==1 and (OS=="android" or moz_widget_toolkit_gonk==1)', {
        'defines': [
          'WEBRTC_ANDROID_OPENSLES',
        ],
      }],
      ['moz_webrtc_omx==1', {
        'defines' : [
          'MOZ_WEBRTC_OMX'
        ],
      }],
      ['OS=="android"', {
        'defines': [
          'WEBRTC_LINUX',
          'WEBRTC_ANDROID',
        ],
        'conditions': [
          ['enable_android_opensl==1', {
            'defines': [
              'WEBRTC_ANDROID_OPENSLES',
            ],
          }],
          ['clang!=1', {
            # The Android NDK doesn't provide optimized versions of these
            # functions. Ensure they are disabled for all compilers.

@@ -48,5 +48,7 @@
      },
    ],
  },
#  }],
#  ],
  ],
}

@@ -8,6 +8,8 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_COMMON_AUDIO_AUDIO_RING_BUFFER_H_
#define WEBRTC_COMMON_AUDIO_AUDIO_RING_BUFFER_H_
#include <stddef.h>
#include <vector>

@@ -47,3 +49,4 @@ class AudioRingBuffer final {
};

}  // namespace webrtc
#endif

@@ -200,6 +200,11 @@
        'resampler/sinc_resampler_sse.cc',
      ],
      'cflags': ['-msse2',],
      'conditions': [
        ['os_posix == 1', {
          'cflags_mozilla': ['-msse2',],
        }],
      ],
      'xcode_settings': {
        'OTHER_CFLAGS': ['-msse2',],
      },

@@ -17,9 +17,12 @@
#define WEBRTC_RESAMPLER_RESAMPLER_H_

#include "webrtc/typedefs.h"
#include <speex/speex_resampler.h>

namespace webrtc {

#define FIXED_RATE_RESAMPLER 0x10

// All methods return 0 on success and -1 on failure.
class Resampler
{

@@ -40,52 +43,11 @@ public:
                int maxLen, int &outLen);

private:
    enum ResamplerMode
    {
        kResamplerMode1To1,
        kResamplerMode1To2,
        kResamplerMode1To3,
        kResamplerMode1To4,
        kResamplerMode1To6,
        kResamplerMode1To12,
        kResamplerMode2To3,
        kResamplerMode2To11,
        kResamplerMode4To11,
        kResamplerMode8To11,
        kResamplerMode11To16,
        kResamplerMode11To32,
        kResamplerMode2To1,
        kResamplerMode3To1,
        kResamplerMode4To1,
        kResamplerMode6To1,
        kResamplerMode12To1,
        kResamplerMode3To2,
        kResamplerMode11To2,
        kResamplerMode11To4,
        kResamplerMode11To8
    };
    SpeexResamplerState* state_;

    // Generic pointers since we don't know what states we'll need
    void* state1_;
    void* state2_;
    void* state3_;

    // Storage if needed
    int16_t* in_buffer_;
    int16_t* out_buffer_;
    int in_buffer_size_;
    int out_buffer_size_;
    int in_buffer_size_max_;
    int out_buffer_size_max_;

    int my_in_frequency_khz_;
    int my_out_frequency_khz_;
    ResamplerMode my_mode_;
    int num_channels_;

    // Extra instance for stereo
    Resampler* slave_left_;
    Resampler* slave_right_;
    int in_freq_;
    int out_freq_;
    int channels_;
};

}  // namespace webrtc

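Note on the hunk above: the rewritten Resampler wraps a single SpeexResamplerState instead of the old fixed mode table. A minimal sketch of the underlying speex API the class now delegates to (rates, frame length, and quality are illustrative values, not from the patch):

  #include <speex/speex_resampler.h>

  // Resample one 10 ms mono frame from 48 kHz to 16 kHz.
  void ResampleSketch(const spx_int16_t* in, spx_int16_t* out) {
    int err = 0;
    SpeexResamplerState* st = speex_resampler_init(
        1, 48000, 16000, SPEEX_RESAMPLER_QUALITY_DEFAULT, &err);
    spx_uint32_t in_len = 480;   // 10 ms at 48 kHz
    spx_uint32_t out_len = 160;  // 10 ms at 16 kHz
    // Processes channel 0; updates in_len/out_len to samples consumed/produced.
    speex_resampler_process_int(st, 0, in, &in_len, out, &out_len);
    speex_resampler_destroy(st);
  }
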
@@ -13,7 +13,6 @@
#include <string.h>

#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/resampler/include/resampler.h"
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"

namespace webrtc {

(Diff for one file not shown because of its large size.)

@@ -8,6 +8,8 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <math.h>

#include "testing/gtest/include/gtest/gtest.h"

#include "webrtc/common_audio/resampler/include/resampler.h"

@@ -26,7 +28,7 @@ const int kRates[] = {
  8000,
  16000,
  32000,
  44000,
  44100,
  48000,
  kMaxRate
};

@@ -34,26 +36,19 @@ const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
const int kMaxChannels = 2;
const size_t kDataSize = static_cast<size_t> (kMaxChannels * kMaxRate / 100);

// TODO(andrew): should we be supporting these combinations?
bool ValidRates(int in_rate, int out_rate) {
  // Not the most compact notation, for clarity.
  if ((in_rate == 44000 && (out_rate == 48000 || out_rate == 96000)) ||
      (out_rate == 44000 && (in_rate == 48000 || in_rate == 96000))) {
    return false;
  }

  return true;
}

class ResamplerTest : public testing::Test {
 protected:
  ResamplerTest();
  virtual void SetUp();
  virtual void TearDown();
  void RunResampleTest(int channels,
                       int src_sample_rate_hz,
                       int dst_sample_rate_hz);

  Resampler rs_;
  int16_t data_in_[kDataSize];
  int16_t data_out_[kDataSize];
  int16_t data_reference_[kDataSize];
};

ResamplerTest::ResamplerTest() {}

@@ -78,59 +73,132 @@ TEST_F(ResamplerTest, Reset) {
      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]
         << ", channels: " << kNumChannels[k];
      SCOPED_TRACE(ss.str());
      if (ValidRates(kRates[i], kRates[j]))
        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kNumChannels[k]));
      else
        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kNumChannels[k]));
      EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
      }
    }
  }
}

// TODO(tlegrand): Replace code inside the two tests below with a function
// with number of channels and ResamplerType as input.
// Sets the signal value to increase by |data| with every sample. Floats are
// used so non-integer values result in rounding error, but not an accumulating
// error.
void SetMonoFrame(int16_t* buffer, float data, int sample_rate_hz) {
  for (int i = 0; i < sample_rate_hz / 100; i++) {
    buffer[i] = data * i;
  }
}

// Sets the signal value to increase by |left| and |right| with every sample in
// each channel respectively.
void SetStereoFrame(int16_t* buffer, float left, float right,
                    int sample_rate_hz) {
  for (int i = 0; i < sample_rate_hz / 100; i++) {
    buffer[i * 2] = left * i;
    buffer[i * 2 + 1] = right * i;
  }
}

// Computes the best SNR based on the error between |ref_frame| and
// |test_frame|. It allows for a sample delay between the signals to
// compensate for the resampling delay.
float ComputeSNR(const int16_t* reference, const int16_t* test,
                 int sample_rate_hz, int channels, int max_delay) {
  float best_snr = 0;
  int best_delay = 0;
  int samples_per_channel = sample_rate_hz/100;
  for (int delay = 0; delay < max_delay; delay++) {
    float mse = 0;
    float variance = 0;
    for (int i = 0; i < samples_per_channel * channels - delay; i++) {
      int error = reference[i] - test[i + delay];
      mse += error * error;
      variance += reference[i] * reference[i];
    }
    float snr = 100;  // We assign 100 dB to the zero-error case.
    if (mse > 0)
      snr = 10 * log10(variance / mse);
    if (snr > best_snr) {
      best_snr = snr;
      best_delay = delay;
    }
  }
  printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay);
  return best_snr;
}
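Note on ComputeSNR above: for each candidate delay d it computes the usual signal-to-noise ratio in decibels, and the test then takes the maximum over d to absorb the resampler's group delay. Written out (ref are the reference samples, test the resampled output):

  \mathrm{SNR}(d) = 10 \log_{10} \frac{\sum_i \mathrm{ref}_i^{\,2}}{\sum_i \left(\mathrm{ref}_i - \mathrm{test}_{i+d}\right)^2}
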

void ResamplerTest::RunResampleTest(int channels,
                                    int src_sample_rate_hz,
                                    int dst_sample_rate_hz) {
  Resampler resampler;  // Create a new one with every test.
  const int16_t kSrcLeft = 60;  // Shouldn't overflow for any used sample rate.
  const int16_t kSrcRight = 30;
  const float kResamplingFactor = (1.0 * src_sample_rate_hz) /
      dst_sample_rate_hz;
  const float kDstLeft = kResamplingFactor * kSrcLeft;
  const float kDstRight = kResamplingFactor * kSrcRight;
  if (channels == 1)
    SetMonoFrame(data_in_, kSrcLeft, src_sample_rate_hz);
  else
    SetStereoFrame(data_in_, kSrcLeft, kSrcRight, src_sample_rate_hz);

  if (channels == 1) {
    SetMonoFrame(data_out_, 0, dst_sample_rate_hz);
    SetMonoFrame(data_reference_, kDstLeft, dst_sample_rate_hz);
  } else {
    SetStereoFrame(data_out_, 0, 0, dst_sample_rate_hz);
    SetStereoFrame(data_reference_, kDstLeft, kDstRight, dst_sample_rate_hz);
  }

  // The speex resampler has a known delay dependent on quality and rates,
  // which we approximate here. Multiplying by two gives us a crude maximum
  // for any resampling, as the old resampler typically (but not always)
  // has lower delay.  The actual delay is calculated internally based on the
  // filter length in the QualityMap.
  static const int kInputKernelDelaySamples = 16*3;
  const int max_delay = std::min(1.0f, 1/kResamplingFactor) *
      kInputKernelDelaySamples * channels * 2;
  printf("(%d, %d Hz) -> (%d, %d Hz) ",  // SNR reported on the same line later.
         channels, src_sample_rate_hz, channels, dst_sample_rate_hz);

  int in_length = channels * src_sample_rate_hz / 100;
  int out_length = 0;
  EXPECT_EQ(0, rs_.Reset(src_sample_rate_hz, dst_sample_rate_hz,
                         (channels == 1 ?
                          kResamplerSynchronous :
                          kResamplerSynchronousStereo)));
  EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
                        out_length));
  EXPECT_EQ(channels * dst_sample_rate_hz / 100, out_length);

  // EXPECT_EQ(0, Resample(src_frame_, &resampler, &dst_frame_));
  EXPECT_GT(ComputeSNR(data_reference_, data_out_, dst_sample_rate_hz,
                       channels, max_delay), 40.0f);
}

TEST_F(ResamplerTest, Mono) {
  const int kChannels = 1;
  for (size_t i = 0; i < kRatesSize; ++i) {
    for (size_t j = 0; j < kRatesSize; ++j) {
      std::ostringstream ss;
      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
      SCOPED_TRACE(ss.str());

      if (ValidRates(kRates[i], kRates[j])) {
        int in_length = kRates[i] / 100;
        int out_length = 0;
        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kChannels));
        EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
                              out_length));
        EXPECT_EQ(kRates[j] / 100, out_length);
      } else {
        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kChannels));
      }
  // We don't attempt to be exhaustive here, but just get good coverage. Some
  // combinations of rates will not be resampled, and some give an odd
  // resampling factor which makes it more difficult to evaluate.
  const int kSampleRates[] = {16000, 32000, 44100, 48000};
  const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
  for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
    for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
      RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]);
    }
  }
}

TEST_F(ResamplerTest, Stereo) {
  const int kChannels = 2;
  for (size_t i = 0; i < kRatesSize; ++i) {
    for (size_t j = 0; j < kRatesSize; ++j) {
      std::ostringstream ss;
      ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
      SCOPED_TRACE(ss.str());

      if (ValidRates(kRates[i], kRates[j])) {
        int in_length = kChannels * kRates[i] / 100;
        int out_length = 0;
        EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j],
                               kChannels));
        EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
                              out_length));
        EXPECT_EQ(kChannels * kRates[j] / 100, out_length);
      } else {
        EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j],
                                kChannels));
      }
  // We don't attempt to be exhaustive here, but just get good coverage. Some
  // combinations of rates will not be resampled, and some give an odd
  // resampling factor which makes it more difficult to evaluate.
  const int kSampleRates[] = {16000, 32000, 44100, 48000};
  const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
  for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
    for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
      RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]);
    }
  }
}

@@ -26,11 +26,11 @@ float SincResampler::Convolve_NEON(const float* input_ptr, const float* k1,

  const float* upper = input_ptr + kKernelSize;
  for (; input_ptr < upper; ) {
    m_input = vld1q_f32(input_ptr);
    m_input = vld1q_f32((const float32_t *) input_ptr);
    input_ptr += 4;
    m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32(k1));
    m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32((const float32_t *) k1));
    k1 += 4;
    m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32(k2));
    m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32((const float32_t *) k2));
    k2 += 4;
  }

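Note on the hunk above: the casts exist because some toolchains declare vld1q_f32 as taking const float32_t* rather than const float*. A self-contained sketch of the same load/multiply-accumulate pattern, assuming an ARM target with NEON enabled:

  #if defined(__ARM_NEON) || defined(__ARM_NEON__)
  #include <arm_neon.h>

  // Dot product of two float arrays whose length is a multiple of 4.
  float DotNeon(const float* a, const float* b, int n) {
    float32x4_t acc = vdupq_n_f32(0.0f);
    for (int i = 0; i < n; i += 4) {
      // acc += a[i..i+3] * b[i..i+3], four lanes at a time
      acc = vmlaq_f32(acc, vld1q_f32(a + i), vld1q_f32(b + i));
    }
    // Horizontal sum of the four accumulator lanes.
    float32x2_t sum2 = vadd_f32(vget_low_f32(acc), vget_high_f32(acc));
    return vget_lane_f32(vpadd_f32(sum2, sum2), 0);
  }
  #endif
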
@@ -56,9 +56,6 @@ WavReader::~WavReader() {
}

size_t WavReader::ReadSamples(size_t num_samples, int16_t* samples) {
#ifndef WEBRTC_ARCH_LITTLE_ENDIAN
#error "Need to convert samples to big-endian when reading from WAV file"
#endif
  // There could be metadata after the audio; ensure we don't read it.
  num_samples = std::min(rtc::checked_cast<uint32_t>(num_samples),
                         num_samples_remaining_);

@@ -68,6 +65,12 @@ size_t WavReader::ReadSamples(size_t num_samples, int16_t* samples) {
  CHECK(read == num_samples || feof(file_handle_));
  CHECK_LE(read, num_samples_remaining_);
  num_samples_remaining_ -= rtc::checked_cast<uint32_t>(read);
#ifndef WEBRTC_ARCH_LITTLE_ENDIAN
  //convert to big-endian
  for(size_t idx = 0; idx < num_samples; idx++) {
    samples[idx] = (samples[idx]<<8) | (samples[idx]>>8);
  }
#endif
  return read;
}

@@ -115,10 +118,17 @@ WavWriter::~WavWriter() {

void WavWriter::WriteSamples(const int16_t* samples, size_t num_samples) {
#ifndef WEBRTC_ARCH_LITTLE_ENDIAN
#error "Need to convert samples to little-endian when writing to WAV file"
#endif
  int16_t * le_samples = new int16_t[num_samples];
  for(size_t idx = 0; idx < num_samples; idx++) {
    le_samples[idx] = (samples[idx]<<8) | (samples[idx]>>8);
  }
  const size_t written =
      fwrite(le_samples, sizeof(*le_samples), num_samples, file_handle_);
  delete []le_samples;
#else
  const size_t written =
      fwrite(samples, sizeof(*samples), num_samples, file_handle_);
#endif
  CHECK_EQ(num_samples, written);
  num_samples_ += static_cast<uint32_t>(written);
  CHECK(written <= std::numeric_limits<uint32_t>::max() ||

@@ -129,7 +129,39 @@ static inline std::string ReadFourCC(uint32_t x) {
  return std::string(reinterpret_cast<char*>(&x), 4);
}
#else
#error "Write be-to-le conversion functions"
static inline void WriteLE16(uint16_t* f, uint16_t x) {
  *f = ((x << 8) & 0xff00) | ((x >> 8) & 0x00ff);
}

static inline void WriteLE32(uint32_t* f, uint32_t x) {
  *f = ((x & 0x000000ff) << 24)
     | ((x & 0x0000ff00) << 8)
     | ((x & 0x00ff0000) >> 8)
     | ((x & 0xff000000) >> 24);
}

static inline void WriteFourCC(uint32_t* f, char a, char b, char c, char d) {
  *f = (static_cast<uint32_t>(a) << 24)
     | (static_cast<uint32_t>(b) << 16)
     | (static_cast<uint32_t>(c) << 8)
     | (static_cast<uint32_t>(d));
}

static inline uint16_t ReadLE16(uint16_t x) {
  return ((x & 0x00ff) << 8) | ((x & 0xff00) >> 8);
}

static inline uint32_t ReadLE32(uint32_t x) {
  return ((x & 0x000000ff) << 24)
       | ((x & 0x0000ff00) << 8)
       | ((x & 0x00ff0000) >> 8)
       | ((x & 0xff000000) >> 24);
}

static inline std::string ReadFourCC(uint32_t x) {
  x = ReadLE32(x);
  return std::string(reinterpret_cast<char*>(&x), 4);
}
#endif

static inline uint32_t RiffChunkSize(uint32_t bytes_in_payload) {

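Note on the two WAV hunks above: both replace the old #error stubs with explicit byte swaps so WAV I/O also works on big-endian hosts. A standalone sketch of the 16-bit swap used for the sample data (the 32-bit case follows the same mask-and-shift pattern as WriteLE32 above; the unsigned cast is an addition here, to keep the right shift from sign-extending):

  #include <cstdint>

  // Swap the two bytes of a 16-bit sample, e.g. 0x1234 -> 0x3412.
  inline int16_t SwapBytes16(int16_t x) {
    const uint16_t u = static_cast<uint16_t>(x);
    return static_cast<int16_t>((u << 8) | (u >> 8));
  }
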
@@ -435,7 +435,7 @@ enum NsModes    // type of Noise Suppression
  kNsLowSuppression,       // lowest suppression
  kNsModerateSuppression,
  kNsHighSuppression,
  kNsVeryHighSuppression,  // highest suppression
  kNsVeryHighSuppression   // highest suppression
};

enum AgcModes  // type of Automatic Gain Control

@@ -460,7 +460,7 @@ enum EcModes   // type of Echo Control
  kEcDefault,     // platform default
  kEcConference,  // conferencing default (aggressive AEC)
  kEcAec,         // Acoustic Echo Cancellation
  kEcAecm,        // AEC mobile
  kEcAecm         // AEC mobile
};

// AECM modes

@@ -496,7 +496,8 @@ enum AudioLayers
  kAudioWindowsWave = 1,
  kAudioWindowsCore = 2,
  kAudioLinuxAlsa = 3,
  kAudioLinuxPulse = 4
  kAudioLinuxPulse = 4,
  kAudioSndio = 5
};

// TODO(henrika): to be removed.

@@ -513,7 +514,7 @@ enum NetEqModes             // NetEQ playout configurations
  kNetEqFax = 2,
  // Minimal buffer management. Inserts zeros for lost packets and during
  // buffer increases.
  kNetEqOff = 3,
  kNetEqOff = 3
};

// TODO(henrika): to be removed.

@@ -529,7 +530,7 @@ enum AmrMode
{
  kRfc3267BwEfficient = 0,
  kRfc3267OctetAligned = 1,
  kRfc3267FileStorage = 2,
  kRfc3267FileStorage = 2
};

// ==================================================================

@@ -556,6 +557,16 @@ enum RawVideoType
  kVideoUnknown = 99
};

enum VideoReceiveState
{
  kReceiveStateInitial,            // No video decoded yet
  kReceiveStateNormal,
  kReceiveStatePreemptiveNACK,     // NACK sent for missing packet, no decode stall/fail yet
  kReceiveStateWaitingKey,         // Decoding stalled, waiting for keyframe or NACK
  kReceiveStateDecodingWithErrors, // Decoding with errors, waiting for keyframe or NACK
  kReceiveStateNoIncoming,         // No errors, but no incoming video since last decode
};

// Video codec
enum { kConfigParameterSize = 128};
enum { kPayloadNameSize = 32};

@@ -632,6 +643,10 @@ struct VideoCodecVP9 {
// H264 specific.
struct VideoCodecH264 {
  VideoCodecProfile profile;
  uint8_t           profile_byte;
  uint8_t           constraints;
  uint8_t           level;
  uint8_t           packetizationMode; // 0 or 1
  bool              frameDroppingOn;
  int               keyFrameInterval;
  // These are NULL/0 if not externally negotiated.

@@ -699,6 +714,8 @@ struct VideoCodec {

  unsigned short      width;
  unsigned short      height;
  // width & height modulo resolution_divisor must be 0
  unsigned char       resolution_divisor;

  unsigned int        startBitrate;  // kilobits/sec.
  unsigned int        maxBitrate;    // kilobits/sec.

@@ -775,6 +792,26 @@ struct OverUseDetectorOptions {
  double initial_threshold;
};

enum CPULoadState {
  kLoadRelaxed = 0,
  kLoadNormal,
  kLoadStressed,
  kLoadLast,
};

class CPULoadStateObserver {
public:
  virtual void onLoadStateChanged(CPULoadState aNewState) = 0;
  virtual ~CPULoadStateObserver() {};
};

class CPULoadStateCallbackInvoker {
public:
  virtual void AddObserver(CPULoadStateObserver* aObserver) = 0;
  virtual void RemoveObserver(CPULoadStateObserver* aObserver) = 0;
  virtual ~CPULoadStateCallbackInvoker() {};
};

// This structure will have the information about when packet is actually
// received by socket.
struct PacketTime {

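Note on the hunk above: the CPULoadState interfaces follow a plain observer pattern. A hypothetical observer, only to illustrate the contract (the names below are illustrative, and this assumes the types live in the webrtc namespace like the rest of common_types.h):

  class LogLoadObserver : public webrtc::CPULoadStateObserver {
  public:
    void onLoadStateChanged(webrtc::CPULoadState aNewState) override {
      // React to load changes, e.g. lower capture resolution when stressed.
      stressed_ = (aNewState == webrtc::kLoadStressed);
    }
  private:
    bool stressed_ = false;
  };

  // invoker->AddObserver(&observer);    // start receiving callbacks
  // invoker->RemoveObserver(&observer); // remove before the observer dies
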
@@ -246,6 +246,28 @@ int ConvertToI420(VideoType src_video_type,
    dst_width = dst_frame->height();
    dst_height = dst_frame->width();
  }
#ifdef WEBRTC_GONK
  if (src_video_type == kYV12) {
    // In gralloc buffer, yv12 color format's cb and cr's strides are aligned
    // to 16 Bytes boundary. See /system/core/include/system/graphics.h
    int stride_y = (src_width + 15) & ~0x0F;
    int stride_uv = (((stride_y + 1) / 2) + 15) & ~0x0F;
    return libyuv::I420Rotate(src_frame,
                              stride_y,
                              src_frame + (stride_y * src_height) + (stride_uv * ((src_height + 1) / 2)),
                              stride_uv,
                              src_frame + (stride_y * src_height),
                              stride_uv,
                              dst_frame->buffer(kYPlane),
                              dst_frame->stride(kYPlane),
                              dst_frame->buffer(kUPlane),
                              dst_frame->stride(kUPlane),
                              dst_frame->buffer(kVPlane),
                              dst_frame->stride(kVPlane),
                              src_width, src_height,
                              ConvertRotationMode(rotation));
  }
#endif
  return libyuv::ConvertToI420(src_frame, sample_size,
                               dst_frame->buffer(kYPlane),
                               dst_frame->stride(kYPlane),

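Note on the hunk above: (x + 15) & ~0x0F is the usual round-up-to-a-power-of-two-multiple trick. A small sketch of the two strides the gralloc YV12 layout needs (function names are illustrative):

  #include <cassert>

  // Round x up to the next multiple of 16 (valid because 16 is a power of two).
  inline int AlignUp16(int x) { return (x + 15) & ~0x0F; }

  void YV12StridesSketch(int width) {
    const int stride_y  = AlignUp16(width);            // luma rows
    const int stride_uv = AlignUp16((stride_y + 1) / 2);  // chroma rows
    assert(stride_y % 16 == 0 && stride_uv % 16 == 0);
    (void)stride_y; (void)stride_uv;
  }
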
@@ -43,6 +43,11 @@ int Plane::MaybeResize(int new_size) {
    return 0;
  rtc::scoped_ptr<uint8_t, AlignedFreeDeleter> new_buffer(
      static_cast<uint8_t*>(AlignedMalloc(new_size, kBufferAlignment)));

  if (!new_buffer.get()) {
    return -1;
  }

  if (buffer_.get()) {
    memcpy(new_buffer.get(), buffer_.get(), plane_size_);
  }

@@ -36,7 +36,9 @@
#define WEBRTC_CODEC_AVT

// PCM16 is useful for testing and incurs only a small binary size cost.
#ifndef WEBRTC_CODEC_PCM16
#define WEBRTC_CODEC_PCM16
#endif

// iLBC and Redundancy coding are excluded from Chromium and Mozilla
// builds to reduce binary size.

@@ -65,6 +67,7 @@
#define WEBRTC_VOICE_ENGINE_AGC    // Near-end AGC
#define WEBRTC_VOICE_ENGINE_ECHO   // Near-end AEC
#define WEBRTC_VOICE_ENGINE_NR     // Near-end NS
#define WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT

#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
#define WEBRTC_VOICE_ENGINE_TYPING_DETECTION  // Typing detection

@@ -12,16 +12,22 @@
    'codecs/interfaces.gypi',
    'codecs/cng/cng.gypi',
    'codecs/g711/g711.gypi',
    'codecs/g722/g722.gypi',
    'codecs/ilbc/ilbc.gypi',
    'codecs/isac/isac.gypi',
    'codecs/isac/isacfix.gypi',
    'codecs/pcm16b/pcm16b.gypi',
    'codecs/red/red.gypi',
    'main/acm2/audio_coding_module.gypi',
    'neteq/neteq.gypi',
  ],
  'conditions': [
    ['include_g722==1', {
      'includes': ['codecs/g722/g722.gypi',],
    }],
    ['include_ilbc==1', {
      'includes': ['codecs/ilbc/ilbc.gypi',],
    }],
    ['include_isac==1', {
      'includes': ['codecs/isac/isac.gypi',
                   'codecs/isac/isacfix.gypi',],
    }],
    ['include_opus==1', {
      'includes': ['codecs/opus/opus.gypi',],
    }],

@@ -32,7 +32,7 @@ class AudioDecoder {
  enum { kNotImplemented = -2 };

  AudioDecoder() = default;
  virtual ~AudioDecoder() = default;
  virtual ~AudioDecoder() {} //= default;

  // Decodes |encode_len| bytes from |encoded| and writes the result in
  // |decoded|. The maximum bytes allowed to be written into |decoded| is

@@ -7,6 +7,9 @@
# be found in the AUTHORS file in the root of the source tree.

{
  'variables': {
    'opus_complexity%': 0,
  },
  'targets': [
    {
      'target_name': 'webrtc_opus',

@@ -30,8 +33,17 @@
        ['build_with_mozilla==1', {
          # Mozilla provides its own build of the opus library.
          'include_dirs': [
            '$(DIST)/include/opus',
          ]
            '/media/libopus/include',
            '/media/libopus/src',
            '/media/libopus/celt',
          ],
          'direct_dependent_settings': {
            'include_dirs': [
              '/media/libopus/include',
              '/media/libopus/src',
              '/media/libopus/celt',
            ],
          },
        }],
      ],
    }],

@@ -42,6 +54,9 @@
      'include_dirs': [
        '<(webrtc_root)',
      ],
      'defines': [
        'OPUS_COMPLEXITY=<(opus_complexity)'
      ],
      'sources': [
        'audio_encoder_opus.cc',
        'interface/audio_encoder_opus.h',

@@ -115,6 +115,9 @@ int16_t WebRtcOpus_Encode(OpusEncInst* inst,

int16_t WebRtcOpus_SetBitRate(OpusEncInst* inst, int32_t rate) {
  if (inst) {
#if defined(OPUS_COMPLEXITY) && (OPUS_COMPLEXITY != 0)
    opus_encoder_ctl(inst->encoder, OPUS_SET_COMPLEXITY(OPUS_COMPLEXITY));
#endif
    return opus_encoder_ctl(inst->encoder, OPUS_SET_BITRATE(rate));
  } else {
    return -1;

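Note on the hunk above: opus_encoder_ctl() is the generic knob interface of the Opus C API; OPUS_SET_COMPLEXITY and OPUS_SET_BITRATE are request macros. A minimal standalone sketch in plain C (bitrate and complexity values are illustrative, and the include path may differ per build):

  #include <opus/opus.h>

  int OpusCtlSketch(void) {
    int err = 0;
    OpusEncoder* enc =
        opus_encoder_create(48000, 1, OPUS_APPLICATION_VOIP, &err);
    if (err != OPUS_OK) return -1;
    opus_encoder_ctl(enc, OPUS_SET_COMPLEXITY(5));   /* range 0..10 */
    opus_encoder_ctl(enc, OPUS_SET_BITRATE(32000));  /* bits per second */
    opus_encoder_destroy(enc);
    return 0;
  }
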
@@ -10,12 +10,6 @@
  'variables': {
    'audio_coding_dependencies': [
      'CNG',
      'G711',
      'G722',
      'iLBC',
      'iSAC',
      'iSACFix',
      'PCM16B',
      'red',
      '<(webrtc_root)/common.gyp:webrtc_common',
      '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',

@@ -27,6 +21,26 @@
        'audio_coding_dependencies': ['webrtc_opus',],
        'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
      }],
      ['include_g711==1', {
        'audio_coding_dependencies': ['G711',],
        'audio_coding_defines': ['WEBRTC_CODEC_G711',],
      }],
      ['include_g722==1', {
        'audio_coding_dependencies': ['G722',],
        'audio_coding_defines': ['WEBRTC_CODEC_G722',],
      }],
      ['include_ilbc==1', {
        'audio_coding_dependencies': ['iLBC',],
        'audio_coding_defines': ['WEBRTC_CODEC_ILBC',],
      }],
      ['include_isac==1', {
        'audio_coding_dependencies': ['iSAC', 'iSACFix',],
#        'audio_coding_defines': ['WEBRTC_CODEC_ISAC', 'WEBRTC_CODEC_ISACFX',],
      }],
      ['include_pcm16b==1', {
        'audio_coding_dependencies': ['PCM16B',],
        'audio_coding_defines': ['WEBRTC_CODEC_PCM16',],
      }],
    ],
  },
  'targets': [

@@ -491,13 +491,11 @@ bool AudioCodingModuleImpl::REDStatus() const {
}

// Configure RED status i.e on/off.
int AudioCodingModuleImpl::SetREDStatus(
#ifdef WEBRTC_CODEC_RED
    bool enable_red) {
int AudioCodingModuleImpl::SetREDStatus(bool enable_red) {
  CriticalSectionScoped lock(acm_crit_sect_);
#ifdef WEBRTC_CODEC_RED
  return codec_manager_.SetCopyRed(enable_red) ? 0 : -1;
#else
    bool /* enable_red */) {
  WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
               "  WEBRTC_CODEC_RED is undefined");
  return -1;

@@ -960,7 +958,7 @@ bool AudioCodingImpl::RegisterSendCodec(int encoder_type,

const AudioEncoder* AudioCodingImpl::GetSenderInfo() const {
  FATAL() << "Not implemented yet.";
  return reinterpret_cast<const AudioEncoder*>(NULL);
  return NULL;
}

const CodecInst* AudioCodingImpl::GetSenderCodecInst() {

@@ -979,7 +977,7 @@ int AudioCodingImpl::Add10MsAudio(const AudioFrame& audio_frame) {

const ReceiverInfo* AudioCodingImpl::GetReceiverInfo() const {
  FATAL() << "Not implemented yet.";
  return reinterpret_cast<const ReceiverInfo*>(NULL);
  return NULL;
}

bool AudioCodingImpl::RegisterReceiveCodec(AudioDecoder* receive_codec) {

@@ -10,18 +10,34 @@
  'variables': {
    'codecs': [
      'G711',
      'G722',
      'PCM16B',
      'iLBC',
      'iSAC',
      'iSACFix',
      'CNG',
    ],
    'neteq_defines': [],
    'conditions': [
      ['include_g722==1', {
        'neteq_dependencies': ['G722'],
        'neteq_defines': ['WEBRTC_CODEC_G722',],
      }],
      ['include_ilbc==1', {
        'neteq_dependencies': ['iLBC'],
        'neteq_defines': ['WEBRTC_CODEC_ILBC',],
      }],
      ['include_isac==1', {
        'neteq_dependencies': ['iSAC', 'iSACFix',],
        'neteq_defines': ['WEBRTC_CODEC_ISAC', 'WEBRTC_CODEC_ISACFIX',],
      }],
      ['include_opus==1', {
        'codecs': ['webrtc_opus',],
        'codecs': ['webrtc_opus'],
        'neteq_dependencies': ['webrtc_opus'],
        'neteq_defines': ['WEBRTC_CODEC_OPUS',],
        'conditions': [
          ['build_with_mozilla==0', {
            'neteq_dependencies': [
              '<(DEPTH)/third_party/opus/opus.gyp:opus',
            ],
          }],
        ],
      }],
    ],
    'neteq_dependencies': [

@@ -35,6 +51,14 @@
    {
      'target_name': 'neteq',
      'type': 'static_library',
      'include_dirs': [
        '../../../../../../media/opus/celt',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          '../../../../../../media/opus/celt',
        ],
      },
      'dependencies': [
        '<@(neteq_dependencies)',
        '<(webrtc_root)/common.gyp:webrtc_common',

@@ -42,6 +66,40 @@
      'defines': [
        '<@(neteq_defines)',
      ],
      'conditions': [
        ['build_with_mozilla==0', {
          'include_dirs': [
            # Need Opus header files for the audio classifier.
            '<(DEPTH)/third_party/opus/src/celt',
            '<(DEPTH)/third_party/opus/src/src',
          ],
          'direct_dependent_settings': {
            'include_dirs': [
              # Need Opus header files for the audio classifier.
              '<(DEPTH)/third_party/opus/src/celt',
              '<(DEPTH)/third_party/opus/src/src',
            ],
          },
          'export_dependent_settings': [
            '<(DEPTH)/third_party/opus/opus.gyp:opus',
          ],
        }],
        ['build_with_mozilla==1', {
          'include_dirs': [
            # Need Opus header files for the audio classifier.
            '<(DEPTH)/../../../media/opus/celt',
#            '<(DEPTH)/third_party/opus/src/src',
          ],
          'direct_dependent_settings': {
            'include_dirs': [
              '../../../../../../media/opus/celt',
              # Need Opus header files for the audio classifier.
              '<(DEPTH)/../../../media/opus/celt',
#              '<(DEPTH)/third_party/opus/src/src',
            ],
          },
        }],
      ],
      'sources': [
        'interface/neteq.h',
        'accelerate.cc',

@@ -126,6 +184,7 @@
        '<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
        '<(webrtc_root)/test/test.gyp:test_support_main',
      ],
      # FIX for include_isac/etc
      'defines': [
        'AUDIO_DECODER_UNITTEST',
        'WEBRTC_CODEC_G722',

@@ -84,8 +84,7 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
      uint16_t index,
      char name[kAdmMaxDeviceNameSize],
      char guid[kAdmMaxGuidSize]) override {
    FATAL() << "Should never be called";
    return -1;
    return input_.RecordingDeviceName(index, name, guid);
  }

  int32_t SetPlayoutDevice(uint16_t index) override {

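The change above replaces a hard failure with delegation: AudioDeviceTemplate now forwards RecordingDeviceName to its input member. A minimal sketch of that composition pattern, with simplified parameter types and hypothetical member names (this is an illustration, not the actual class):

#include <cstdint>

// Sketch only: a simplified stand-in for AudioDeviceTemplate's delegation.
template <class InputType, class OutputType>
class DeviceTemplate {
 public:
  // Forward device queries to the platform-specific input implementation.
  int32_t RecordingDeviceName(uint16_t index, char* name, char* guid) {
    return input_.RecordingDeviceName(index, name, guid);
  }

 private:
  InputType input_;    // e.g. a JNI- or OpenSL-based recorder
  OutputType output_;  // matching playout implementation
};
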
@@ -15,7 +15,9 @@
#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H

#if !defined(MOZ_WIDGET_GONK)
#include <jni.h>
#endif

#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_device/audio_device_utility.h"

@@ -9,13 +9,18 @@
 */

#include "webrtc/modules/audio_device/android/audio_manager.h"
#if !defined(MOZ_WIDGET_GONK)
#include "AndroidJNIWrapper.h"
#endif

#include <android/log.h>

#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
#if !defined(MOZ_WIDGET_GONK)
#include "webrtc/modules/utility/interface/helpers_android.h"
#endif

#define TAG "AudioManager"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)

@@ -26,11 +31,14 @@

namespace webrtc {

#if !defined(MOZ_WIDGET_GONK)
static JavaVM* g_jvm = NULL;
static jobject g_context = NULL;
static jclass g_audio_manager_class = NULL;
#endif

void AudioManager::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
#if !defined(MOZ_WIDGET_GONK)
  ALOGD("SetAndroidAudioDeviceObjects%s", GetThreadInfo().c_str());

  CHECK(jvm);

@@ -40,13 +48,15 @@ void AudioManager::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
  JNIEnv* jni = GetEnv(g_jvm);
  CHECK(jni) << "AttachCurrentThread must be called on this thread";

  g_context = NewGlobalRef(jni, reinterpret_cast<jobject>(context));
  jclass local_class = FindClass(
      jni, "org/webrtc/voiceengine/WebRtcAudioManager");
  g_audio_manager_class = reinterpret_cast<jclass>(
      NewGlobalRef(jni, local_class));
  CHECK_EXCEPTION(jni);
  if (!g_context) {
    g_context = NewGlobalRef(jni, reinterpret_cast<jobject>(context));
  }

  if (!g_audio_manager_class) {
    g_audio_manager_class = jsjni_GetGlobalClassRef(
        "org/webrtc/voiceengine/WebRtcAudioManager");
    DCHECK(g_audio_manager_class);
  }
  // Register native methods with the WebRtcAudioManager class. These methods
  // are declared private native in WebRtcAudioManager.java.
  JNINativeMethod native_methods[] = {

@@ -55,9 +65,11 @@ void AudioManager::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
  jni->RegisterNatives(g_audio_manager_class,
                       native_methods, arraysize(native_methods));
  CHECK_EXCEPTION(jni) << "Error during RegisterNatives";
#endif
}
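The new guards above make SetAndroidAudioDeviceObjects idempotent: each JNI global reference is created only on the first call and reused afterwards. A minimal sketch of that caching pattern follows; the helper and variable names are illustrative assumptions, not the actual Gecko bindings:

#include <jni.h>

// Sketch only: create each JNI global ref once, reuse it on later calls.
static JavaVM* g_vm = NULL;
static jobject g_ctx = NULL;
static jclass g_cls = NULL;

void CacheDeviceObjects(JNIEnv* env, JavaVM* vm, jobject context) {
  if (!g_vm) {
    g_vm = vm;  // the VM pointer never changes, so set it once
  }
  if (!g_ctx) {
    // A global ref keeps the application context alive across calls.
    g_ctx = env->NewGlobalRef(context);
  }
  if (!g_cls) {
    // FindClass returns a local ref; promote it and drop the local.
    jclass local = env->FindClass("org/webrtc/voiceengine/WebRtcAudioManager");
    g_cls = static_cast<jclass>(env->NewGlobalRef(local));
    env->DeleteLocalRef(local);
  }
}
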
void AudioManager::ClearAndroidAudioDeviceObjects() {
#if !defined(MOZ_WIDGET_GONK)
  ALOGD("ClearAndroidAudioDeviceObjects%s", GetThreadInfo().c_str());
  JNIEnv* jni = GetEnv(g_jvm);
  CHECK(jni) << "AttachCurrentThread must be called on this thread";

@@ -68,28 +80,36 @@ void AudioManager::ClearAndroidAudioDeviceObjects() {
  DeleteGlobalRef(jni, g_context);
  g_context = NULL;
  g_jvm = NULL;
#endif
}

AudioManager::AudioManager()
    : j_audio_manager_(NULL),
      initialized_(false) {
    : initialized_(false) {
#if !defined(MOZ_WIDGET_GONK)
  j_audio_manager_ = NULL;
  ALOGD("ctor%s", GetThreadInfo().c_str());
#endif
  CHECK(HasDeviceObjects());
  CreateJavaInstance();
}

AudioManager::~AudioManager() {
#if !defined(MOZ_WIDGET_GONK)
  ALOGD("~dtor%s", GetThreadInfo().c_str());
#endif
  DCHECK(thread_checker_.CalledOnValidThread());
  Close();
#if !defined(MOZ_WIDGET_GONK)
  AttachThreadScoped ats(g_jvm);
  JNIEnv* jni = ats.env();
  jni->DeleteGlobalRef(j_audio_manager_);
  j_audio_manager_ = NULL;
#endif
  DCHECK(!initialized_);
}

bool AudioManager::Init() {
#if !defined(MOZ_WIDGET_GONK)
  ALOGD("Init%s", GetThreadInfo().c_str());
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!initialized_);

@@ -102,11 +122,13 @@ bool AudioManager::Init() {
    ALOGE("init failed!");
    return false;
  }
#endif
  initialized_ = true;
  return true;
}

bool AudioManager::Close() {
#if !defined(MOZ_WIDGET_GONK)
  ALOGD("Close%s", GetThreadInfo().c_str());
  DCHECK(thread_checker_.CalledOnValidThread());
  if (!initialized_)

@@ -117,10 +139,12 @@ bool AudioManager::Close() {
      jni, g_audio_manager_class, "dispose", "()V");
  jni->CallVoidMethod(j_audio_manager_, disposeID);
  CHECK_EXCEPTION(jni);
#endif
  initialized_ = false;
  return true;
}

#if !defined(MOZ_WIDGET_GONK)
void JNICALL AudioManager::CacheAudioParameters(JNIEnv* env, jobject obj,
    jint sample_rate, jint channels, jlong nativeAudioManager) {
  webrtc::AudioManager* this_object =

@@ -138,6 +162,7 @@ void AudioManager::OnCacheAudioParameters(
  playout_parameters_.reset(sample_rate, channels);
  record_parameters_.reset(sample_rate, channels);
}
#endif

AudioParameters AudioManager::GetPlayoutAudioParameters() const {
  CHECK(playout_parameters_.is_valid());

@@ -150,10 +175,15 @@ AudioParameters AudioManager::GetRecordAudioParameters() const {
}

bool AudioManager::HasDeviceObjects() {
#if !defined(MOZ_WIDGET_GONK)
  return (g_jvm && g_context && g_audio_manager_class);
#else
  return true;
#endif
}

void AudioManager::CreateJavaInstance() {
#if !defined(MOZ_WIDGET_GONK)
  ALOGD("CreateJavaInstance");
  AttachThreadScoped ats(g_jvm);
  JNIEnv* jni = ats.env();

@@ -168,6 +198,7 @@ void AudioManager::CreateJavaInstance() {
  j_audio_manager_ = jni->NewGlobalRef(j_audio_manager_);
  CHECK_EXCEPTION(jni) << "Error during NewGlobalRef";
  CHECK(j_audio_manager_);
#endif
}

}  // namespace webrtc

@@ -11,13 +11,17 @@
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_MANAGER_H_

#if !defined(MOZ_WIDGET_GONK)
#include <jni.h>
#endif

#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#if !defined(MOZ_WIDGET_GONK)
#include "webrtc/modules/utility/interface/helpers_android.h"
#endif

namespace webrtc {

@@ -96,13 +100,15 @@ class AudioManager {
  bool initialized() const { return initialized_; }

 private:
#if !defined(MOZ_WIDGET_GONK)
  // Called from Java side so we can cache the native audio parameters.
  // This method will be called by the WebRtcAudioManager constructor, i.e.
  // on the same thread that this object is created on.
  static void JNICALL CacheAudioParameters(JNIEnv* env, jobject obj,
      jint sample_rate, jint channels, jlong nativeAudioManager);
  void OnCacheAudioParameters(JNIEnv* env, jint sample_rate, jint channels);

#endif

  // Returns true if SetAndroidAudioDeviceObjects() has been called
  // successfully.
  bool HasDeviceObjects();

@@ -115,8 +121,10 @@ class AudioManager {
  // other methods are called from the same thread.
  rtc::ThreadChecker thread_checker_;

#if !defined(MOZ_WIDGET_GONK)
  // The Java WebRtcAudioManager instance.
  jobject j_audio_manager_;
#endif

  // Set to true by Init() and false by Close().
  bool initialized_;

@@ -13,6 +13,7 @@
#include <android/log.h>
#include <assert.h>

#include "AndroidJNIWrapper.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/system_wrappers/interface/trace.h"

@@ -53,23 +54,18 @@ void AudioManagerJni::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
  // Store global Java VM variables to be accessed by API calls.
  g_jvm_ = reinterpret_cast<JavaVM*>(jvm);
  g_jni_env_ = GetEnv(g_jvm_);
  g_context_ = g_jni_env_->NewGlobalRef(reinterpret_cast<jobject>(context));

  // FindClass must be made in this function since this function's contract
  // requires it to be called by a Java thread.
  // See
  // http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
  // as to why this is necessary.
  // Get the AudioManagerAndroid class object.
  jclass javaAmClassLocal = g_jni_env_->FindClass(
      "org/webrtc/voiceengine/AudioManagerAndroid");
  assert(javaAmClassLocal);
  if (!g_context_) {
    g_context_ = g_jni_env_->NewGlobalRef(reinterpret_cast<jobject>(context));
  }

  // Create a global reference such that the class object is not recycled by
  // the garbage collector.
  g_audio_manager_class_ = reinterpret_cast<jclass>(
      g_jni_env_->NewGlobalRef(javaAmClassLocal));
  assert(g_audio_manager_class_);
  if (!g_audio_manager_class_) {
    // Create a global reference such that the class object is not recycled by
    // the garbage collector.
    g_audio_manager_class_ = jsjni_GetGlobalClassRef(
        "org/webrtc/voiceengine/AudioManagerAndroid");
    DCHECK(g_audio_manager_class_);
  }
}

void AudioManagerJni::ClearAndroidAudioDeviceObjects() {

@@ -8,6 +8,7 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "AndroidJNIWrapper.h"
#include "webrtc/modules/audio_device/android/audio_record_jni.h"

#include <android/log.h>

@@ -44,17 +45,16 @@ void AudioRecordJni::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
  JNIEnv* jni = GetEnv(g_jvm);
  CHECK(jni) << "AttachCurrentThread must be called on this thread";

  // Protect context from being deleted during garbage collection.
  g_context = NewGlobalRef(jni, reinterpret_cast<jobject>(context));
  if (!g_context) {
    // Protect context from being deleted during garbage collection.
    g_context = NewGlobalRef(jni, reinterpret_cast<jobject>(context));
  }

  // Load the locally-defined WebRtcAudioRecord class and create a new global
  // reference to it.
  jclass local_class = FindClass(
      jni, "org/webrtc/voiceengine/WebRtcAudioRecord");
  g_audio_record_class = reinterpret_cast<jclass>(
      NewGlobalRef(jni, local_class));
  jni->DeleteLocalRef(local_class);
  CHECK_EXCEPTION(jni);
  if (!g_audio_record_class) {
    g_audio_record_class = jsjni_GetGlobalClassRef(
        "org/webrtc/voiceengine/WebRtcAudioRecord");
    DCHECK(g_audio_record_class);
  }

  // Register native methods with the WebRtcAudioRecord class. These methods
  // are declared private native in WebRtcAudioRecord.java.

@@ -318,4 +318,18 @@ void AudioRecordJni::CreateJavaInstance() {
  CHECK(j_audio_record_);
}

int32_t AudioRecordJni::RecordingDeviceName(uint16_t index,
                                            char name[kAdmMaxDeviceNameSize],
                                            char guid[kAdmMaxGuidSize]) {
  // Return empty string
  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid)
  {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return 0;
}

}  // namespace webrtc

@@ -78,6 +78,9 @@ class AudioRecordJni {

  bool BuiltInAECIsAvailable() const;
  int32_t EnableBuiltInAEC(bool enable);
  int32_t RecordingDeviceName(uint16_t index,
                              char name[kAdmMaxDeviceNameSize],
                              char guid[kAdmMaxGuidSize]);

 private:
  // Called from Java side so we can cache the address of the Java-managed

@@ -10,6 +10,7 @@

#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
#include "AndroidJNIWrapper.h"

#include <android/log.h>

@@ -39,13 +40,15 @@ void AudioTrackJni::SetAndroidAudioDeviceObjects(void* jvm, void* context) {
  JNIEnv* jni = GetEnv(g_jvm);
  CHECK(jni) << "AttachCurrentThread must be called on this thread";

  g_context = NewGlobalRef(jni, reinterpret_cast<jobject>(context));
  jclass local_class = FindClass(
      jni, "org/webrtc/voiceengine/WebRtcAudioTrack");
  g_audio_track_class = reinterpret_cast<jclass>(
      NewGlobalRef(jni, local_class));
  jni->DeleteLocalRef(local_class);
  CHECK_EXCEPTION(jni);
  if (!g_context) {
    g_context = NewGlobalRef(jni, reinterpret_cast<jobject>(context));
  }

  if (!g_audio_track_class) {
    g_audio_track_class = jsjni_GetGlobalClassRef(
        "org/webrtc/voiceengine/WebRtcAudioTrack");
    DCHECK(g_audio_track_class);
  }

  // Register native methods with the WebRtcAudioTrack class. These methods
  // are declared private native in WebRtcAudioTrack.java.

@@ -338,4 +341,18 @@ void AudioTrackJni::CreateJavaInstance() {
  CHECK(j_audio_track_);
}

int32_t AudioTrackJni::PlayoutDeviceName(uint16_t index,
                                         char name[kAdmMaxDeviceNameSize],
                                         char guid[kAdmMaxGuidSize]) {
  // Return empty string
  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid)
  {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return 0;
}

}  // namespace webrtc

@@ -74,6 +74,10 @@ class AudioTrackJni : public PlayoutDelayProvider {
  int32_t PlayoutDelay(uint16_t& delayMS) const;
  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

  int32_t PlayoutDeviceName(uint16_t index,
                            char name[kAdmMaxDeviceNameSize],
                            char guid[kAdmMaxGuidSize]);

 protected:
  // PlayoutDelayProvider implementation.
  virtual int PlayoutDelayMs();

@@ -16,7 +16,14 @@ package org.webrtc.voiceengine;
import android.content.Context;
import android.content.pm.PackageManager;
import android.media.AudioManager;
import android.util.Log;

import java.lang.reflect.Field;
import java.lang.reflect.Method;

import org.mozilla.gecko.annotation.WebRTCJNITarget;

@WebRTCJNITarget
class AudioManagerAndroid {
  // Most of Google lead devices use 44.1K as the default sampling rate, 44.1K
  // is also widely used on other android devices.

@@ -38,21 +45,28 @@ class AudioManagerAndroid {

    mNativeOutputSampleRate = DEFAULT_SAMPLING_RATE;
    mAudioLowLatencyOutputFrameSize = DEFAULT_FRAMES_PER_BUFFER;
    mAudioLowLatencySupported = context.getPackageManager().hasSystemFeature(
        PackageManager.FEATURE_AUDIO_LOW_LATENCY);
    if (android.os.Build.VERSION.SDK_INT >=
        android.os.Build.VERSION_CODES.JELLY_BEAN_MR1) {
      String sampleRateString = audioManager.getProperty(
          AudioManager.PROPERTY_OUTPUT_SAMPLE_RATE);
      if (sampleRateString != null) {
        mNativeOutputSampleRate = Integer.parseInt(sampleRateString);
      }
      String framesPerBuffer = audioManager.getProperty(
          AudioManager.PROPERTY_OUTPUT_FRAMES_PER_BUFFER);
      if (framesPerBuffer != null) {
        17 /*android.os.Build.VERSION_CODES.JELLY_BEAN_MR1*/) {
      try {
        Method getProperty = AudioManager.class.getMethod("getProperty", String.class);
        Field sampleRateField = AudioManager.class.getField("PROPERTY_OUTPUT_SAMPLE_RATE");
        Field framePerBufferField = AudioManager.class.getField("PROPERTY_OUTPUT_FRAMES_PER_BUFFER");
        String sampleRateKey = (String)sampleRateField.get(null);
        String framePerBufferKey = (String)framePerBufferField.get(null);
        String sampleRateString = (String)getProperty.invoke(audioManager, sampleRateKey);
        if (sampleRateString != null) {
          mNativeOutputSampleRate = Integer.parseInt(sampleRateString);
        }
        String framesPerBuffer = (String)getProperty.invoke(audioManager, framePerBufferKey);
        if (framesPerBuffer != null) {
          mAudioLowLatencyOutputFrameSize = Integer.parseInt(framesPerBuffer);
        }
      } catch (Exception ex) {
        Log.w("WebRTC", "error getting low latency params", ex);
      }
    }
    mAudioLowLatencySupported = context.getPackageManager().hasSystemFeature(
        PackageManager.FEATURE_AUDIO_LOW_LATENCY);
  }

  @SuppressWarnings("unused")

@@ -69,4 +83,4 @@ class AudioManagerAndroid {
  private int getAudioLowLatencyOutputFrameSize() {
    return mAudioLowLatencyOutputFrameSize;
  }
}
}

@@ -26,6 +26,10 @@ import android.util.Log;
// recommended to always use AudioManager.MODE_IN_COMMUNICATION.
// This class also adds support for output volume control of the
// STREAM_VOICE_CALL-type stream.

import org.mozilla.gecko.annotation.WebRTCJNITarget;

@WebRTCJNITarget
class WebRtcAudioManager {
  private static final boolean DEBUG = false;

@@ -26,6 +26,9 @@ import android.os.Process;
import android.os.SystemClock;
import android.util.Log;

import org.mozilla.gecko.annotation.WebRTCJNITarget;

@WebRTCJNITarget
class WebRtcAudioRecord {
  private static final boolean DEBUG = false;

@@ -46,7 +49,7 @@ class WebRtcAudioRecord {

  private ByteBuffer byteBuffer;

  private AudioRecord audioRecord = null;
  private AudioRecord audioRecord;
  private AudioRecordThread audioThread = null;

  private AcousticEchoCanceler aec = null;

@@ -163,7 +166,7 @@ class WebRtcAudioRecord {
        channels + ")");
    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
    final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND;
    byteBuffer = byteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
    Logd("byteBuffer.capacity: " + byteBuffer.capacity());
    // Rather than passing the ByteBuffer with every callback (requiring
    // the potentially expensive GetDirectBufferAddress) we simply have the

@@ -188,8 +191,14 @@ class WebRtcAudioRecord {

    int bufferSizeInBytes = Math.max(byteBuffer.capacity(), minBufferSize);
    Logd("bufferSizeInBytes: " + bufferSizeInBytes);

    int audioSource = AudioSource.VOICE_COMMUNICATION;
    if (android.os.Build.VERSION.SDK_INT < 11) {
      audioSource = AudioSource.DEFAULT;
    }

    try {
      audioRecord = new AudioRecord(AudioSource.VOICE_COMMUNICATION,
      audioRecord = new AudioRecord(audioSource,
                                    sampleRate,
                                    AudioFormat.CHANNEL_IN_MONO,
                                    AudioFormat.ENCODING_PCM_16BIT,
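The comment above alludes to the native half of this design: instead of resolving the direct buffer address on every audio callback, the C++ side caches it once at initialization. A hedged sketch of that JNI pattern; the exported function name and buffer variable are illustrative, not the actual Gecko bindings:

#include <jni.h>
#include <cstdint>

// Cached once; valid for the lifetime of the direct ByteBuffer.
static int8_t* g_record_buffer = NULL;

extern "C" JNIEXPORT void JNICALL
Java_org_example_AudioRecordBridge_nativeCacheDirectBufferAddress(
    JNIEnv* env, jobject /* obj */, jobject byte_buffer) {
  // GetDirectBufferAddress can be expensive, so call it once at init
  // rather than on every 10 ms audio callback.
  g_record_buffer =
      static_cast<int8_t*>(env->GetDirectBufferAddress(byte_buffer));
}
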
@@ -20,6 +20,9 @@ import android.media.AudioTrack;
import android.os.Process;
import android.util.Log;

import org.mozilla.gecko.annotation.WebRTCJNITarget;

@WebRTCJNITarget
class WebRtcAudioTrack {
  private static final boolean DEBUG = false;

@@ -41,7 +44,7 @@ class WebRtcAudioTrack {

  private ByteBuffer byteBuffer;

  private AudioTrack audioTrack = null;
  private AudioTrack audioTrack;
  private AudioTrackThread audioThread = null;

  /**

@@ -149,7 +152,7 @@ class WebRtcAudioTrack {
    Logd("InitPlayout(sampleRate=" + sampleRate + ", channels=" +
        channels + ")");
    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
    byteBuffer = byteBuffer.allocateDirect(
    byteBuffer = ByteBuffer.allocateDirect(
        bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
    Logd("byteBuffer.capacity: " + byteBuffer.capacity());
    // Rather than passing the ByteBuffer with every callback (requiring

@@ -11,7 +11,9 @@
#include "webrtc/modules/audio_device/android/opensles_input.h"

#include <assert.h>
#include <dlfcn.h>

#include "OpenSLESProvider.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"

@@ -20,6 +22,13 @@
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
#include <media/AudioSystem.h>
#include <audio_effects/effect_aec.h>
#include <audio_effects/effect_ns.h>
#include <utils/Errors.h>
#endif

#define VOID_RETURN
#define OPENSL_RETURN_ON_FAILURE(op, ret_val) \
  do { \

@@ -60,7 +69,12 @@ OpenSlesInput::OpenSlesInput(
      active_queue_(0),
      rec_sampling_rate_(0),
      agc_enabled_(false),
      recording_delay_(0) {
#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
      aec_(NULL),
      ns_(NULL),
#endif
      recording_delay_(0),
      opensles_lib_(NULL) {
}

OpenSlesInput::~OpenSlesInput() {

@@ -68,24 +82,64 @@ OpenSlesInput::~OpenSlesInput() {

int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
                                                    void* context) {
#if !defined(WEBRTC_GONK)
  AudioManagerJni::SetAndroidAudioDeviceObjects(javaVM, context);
#endif
  return 0;
}

void OpenSlesInput::ClearAndroidAudioDeviceObjects() {
#if !defined(WEBRTC_GONK)
  AudioManagerJni::ClearAndroidAudioDeviceObjects();
#endif
}

int32_t OpenSlesInput::Init() {
  assert(!initialized_);

  /* Try to dynamically open the OpenSLES library */
  opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY);
  if (!opensles_lib_) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0,
                 " failed to dlopen OpenSLES library");
    return -1;
  }

  f_slCreateEngine = (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine");
  SL_IID_ENGINE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE");
  SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE");
  SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION");
  SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
  SL_IID_RECORD_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_RECORD");

  if (!f_slCreateEngine ||
      !SL_IID_ENGINE_ ||
      !SL_IID_BUFFERQUEUE_ ||
      !SL_IID_ANDROIDCONFIGURATION_ ||
      !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ ||
      !SL_IID_RECORD_) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0,
                 " failed to find OpenSLES function");
    return -1;
  }

  // Set up OpenSL engine.
  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
                                          NULL, NULL),
#ifndef MOZILLA_INTERNAL_API
  OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0,
                                            NULL, NULL),
                           -1);
#else
  OPENSL_RETURN_ON_FAILURE(mozilla_get_sles_engine(&sles_engine_, 1, kOption), -1);
#endif
#ifndef MOZILLA_INTERNAL_API
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                    SL_BOOLEAN_FALSE),
                           -1);
#else
  OPENSL_RETURN_ON_FAILURE(mozilla_realize_sles_engine(sles_engine_), -1);
#endif
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
                                                         SL_IID_ENGINE,
                                                         SL_IID_ENGINE_,
                                                         &sles_engine_itf_),
                           -1);
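Init() above binds OpenSL ES at runtime instead of linking against it, so the module still loads on systems without libOpenSLES.so. A minimal sketch of that dlopen/dlsym pattern under the same error-handling discipline; the library and symbol names here are placeholders, not the real OpenSL ES entry points:

#include <dlfcn.h>
#include <cstdio>

typedef int (*entry_fn)(int);

// Returns the resolved function, or NULL on failure; on success the caller
// owns *lib_out and must dlclose() it when done with the function.
entry_fn LoadEntry(void** lib_out) {
  void* lib = dlopen("libexample.so", RTLD_LAZY);  // lazy symbol binding
  if (!lib) {
    std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
    return NULL;
  }
  entry_fn fn = reinterpret_cast<entry_fn>(dlsym(lib, "example_entry"));
  if (!fn) {
    dlclose(lib);  // missing symbol: unload and report failure
    return NULL;
  }
  *lib_out = lib;
  return fn;
}
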
@@ -100,10 +154,15 @@ int32_t OpenSlesInput::Init() {
int32_t OpenSlesInput::Terminate() {
  // It is assumed that the caller has stopped recording before terminating.
  assert(!recording_);
#ifndef MOZILLA_INTERNAL_API
  (*sles_engine_)->Destroy(sles_engine_);
#else
  mozilla_destroy_sles_engine(&sles_engine_);
#endif
  initialized_ = false;
  mic_initialized_ = false;
  rec_initialized_ = false;
  dlclose(opensles_lib_);
  return 0;
}

@@ -225,6 +284,14 @@ int32_t OpenSlesInput::StereoRecordingIsAvailable(bool& available) { // NOLINT
  return 0;
}

int32_t OpenSlesInput::SetStereoRecording(bool enable) {  // NOLINT
  if (enable) {
    return -1;
  } else {
    return 0;
  }
}

int32_t OpenSlesInput::StereoRecording(bool& enabled) const {  // NOLINT
  enabled = false;
  return 0;

@@ -268,8 +335,12 @@ void OpenSlesInput::UpdateRecordingDelay() {
}

void OpenSlesInput::UpdateSampleRate() {
#if !defined(WEBRTC_GONK)
  rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
      audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
#else
  rec_sampling_rate_ = kDefaultSampleRate;
#endif
}

void OpenSlesInput::CalculateNumFifoBuffersNeeded() {

@@ -321,6 +392,104 @@ bool OpenSlesInput::EnqueueAllBuffers() {
  return true;
}
void OpenSlesInput::SetupVoiceMode() {
  SLAndroidConfigurationItf configItf;
  SLresult res = (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_ANDROIDCONFIGURATION_,
                                                 (void*)&configItf);
  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL GetInterface: %d", res);

  if (res == SL_RESULT_SUCCESS) {
    SLuint32 voiceMode = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
    SLuint32 voiceSize = sizeof(voiceMode);

    res = (*configItf)->SetConfiguration(configItf,
                                         SL_ANDROID_KEY_RECORDING_PRESET,
                                         &voiceMode, voiceSize);
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL Set Voice mode res: %d", res);
  }
}

#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
bool OpenSlesInput::CheckPlatformAEC() {
  effect_descriptor_t fxDesc;
  uint32_t numFx;

  if (android::AudioEffect::queryNumberEffects(&numFx) != android::NO_ERROR) {
    return false;
  }

  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "Platform has %d effects", numFx);

  for (uint32_t i = 0; i < numFx; i++) {
    if (android::AudioEffect::queryEffect(i, &fxDesc) != android::NO_ERROR) {
      continue;
    }
    if (memcmp(&fxDesc.type, FX_IID_AEC, sizeof(fxDesc.type)) == 0) {
      return true;
    }
  }

  return false;
}

void OpenSlesInput::SetupAECAndNS() {
  bool hasAec = CheckPlatformAEC();
  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "Platform has AEC: %d", hasAec);
  // This code should not have been enabled if this fails, because it means the
  // software AEC will have been disabled as well. If you hit this, you need
  // to fix your B2G config or fix the hardware AEC on your device.
  assert(hasAec);

  SLAndroidConfigurationItf configItf;
  SLresult res = (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_ANDROIDCONFIGURATION_,
                                                 (void*)&configItf);
  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL GetInterface: %d", res);

  if (res == SL_RESULT_SUCCESS) {
    SLuint32 sessionId = 0;
    SLuint32 idSize = sizeof(sessionId);
    res = (*configItf)->GetConfiguration(configItf,
                                         SL_ANDROID_KEY_RECORDING_SESSION_ID,
                                         &idSize, &sessionId);
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL Get sessionId res: %d", res);

    if (res == SL_RESULT_SUCCESS && idSize == sizeof(sessionId)) {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL sessionId: %d", sessionId);

      aec_ = new android::AudioEffect(FX_IID_AEC, NULL, 0, 0, 0, sessionId, 0);
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL aec: %p", aec_);

      if (aec_) {
        android::status_t status = aec_->initCheck();
        if (status == android::NO_ERROR || status == android::ALREADY_EXISTS) {
          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL aec enabled");
          aec_->setEnabled(true);
        } else {
          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL aec disabled: %d", status);
          delete aec_;
          aec_ = NULL;
        }
      }

      ns_ = new android::AudioEffect(FX_IID_NS, NULL, 0, 0, 0, sessionId, 0);
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL ns: %p", ns_);

      if (ns_) {
        android::status_t status = ns_->initCheck();
        if (status == android::NO_ERROR || status == android::ALREADY_EXISTS) {
          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL ns enabled");
          ns_->setEnabled(true);
        } else {
          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, 0, "OpenSL ns disabled: %d", status);
          delete ns_;
          ns_ = NULL;
        }
      }
    }
  }
}
#endif
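SetupAECAndNS() applies the same lifecycle to both effects: construct, verify with initCheck(), then either enable or destroy. A hedged sketch of that pattern factored into one helper, assuming the android::AudioEffect interface used above:

// Sketch: enable a platform audio effect, or clean it up if it failed to
// initialize. Returns the effect on success, NULL on failure.
android::AudioEffect* EnableEffectOrDrop(android::AudioEffect* fx) {
  if (!fx) {
    return NULL;
  }
  android::status_t status = fx->initCheck();
  if (status == android::NO_ERROR || status == android::ALREADY_EXISTS) {
    fx->setEnabled(true);  // effect is attached to the recording session
    return fx;
  }
  delete fx;  // failed init: do not leak the half-constructed effect
  return NULL;
}
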
bool OpenSlesInput::CreateAudioRecorder() {
  if (!event_.Start()) {
    assert(false);

@@ -343,7 +512,7 @@ bool OpenSlesInput::CreateAudioRecorder() {
  // Note the interfaces still need to be initialized. This only tells OpenSl
  // that the interfaces will be needed at some point.
  const SLInterfaceID id[kNumInterfaces] = {
    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
    SL_IID_ANDROIDSIMPLEBUFFERQUEUE_, SL_IID_ANDROIDCONFIGURATION_ };
  const SLboolean req[kNumInterfaces] = {
    SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
  OPENSL_RETURN_ON_FAILURE(

@@ -359,33 +528,29 @@ bool OpenSlesInput::CreateAudioRecorder() {
  SLAndroidConfigurationItf recorder_config;
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_,
                                      SL_IID_ANDROIDCONFIGURATION,
                                      SL_IID_ANDROIDCONFIGURATION_,
                                      &recorder_config),
      false);

  // Set audio recorder configuration to
  // SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION which ensures that we
  // use the main microphone tuned for audio communications.
  SLint32 stream_type = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
  OPENSL_RETURN_ON_FAILURE(
      (*recorder_config)->SetConfiguration(recorder_config,
                                           SL_ANDROID_KEY_RECORDING_PRESET,
                                           &stream_type,
                                           sizeof(SLint32)),
      false);
  SetupVoiceMode();

  // Realize the recorder in synchronous mode.
  OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
                                                      SL_BOOLEAN_FALSE),
                           false);

#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
  SetupAECAndNS();
#endif

  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
      (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD_,
                                      static_cast<void*>(&sles_recorder_itf_)),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_recorder_)->GetInterface(
          sles_recorder_,
          SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
          SL_IID_ANDROIDSIMPLEBUFFERQUEUE_,
          static_cast<void*>(&sles_recorder_sbq_itf_)),
      false);
  return true;

@@ -393,6 +558,14 @@ bool OpenSlesInput::CreateAudioRecorder() {

void OpenSlesInput::DestroyAudioRecorder() {
  event_.Stop();

#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
  delete aec_;
  delete ns_;
  aec_ = NULL;
  ns_ = NULL;
#endif

  if (sles_recorder_sbq_itf_) {
    // Release all buffers currently queued up.
    OPENSL_RETURN_ON_FAILURE(

@@ -526,7 +699,8 @@ bool OpenSlesInput::CbThreadImpl() {
  while (fifo_->size() > 0 && recording_) {
    int8_t* audio = fifo_->Pop();
    audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
    audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
    audio_buffer_->SetVQEData(delay_provider_ ?
                              delay_provider_->PlayoutDelayMs() : 0,
                              recording_delay_, 0);
    audio_buffer_->DeliverRecordedData();
  }
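The CbThreadImpl() change guards against a missing delay provider rather than dereferencing it unconditionally. The same guard as a standalone helper, a sketch with the provider type templated so it stands alone:

// Sketch: report zero playout delay when no provider is attached.
template <class Provider>
int PlayoutDelayMsOrZero(Provider* provider) {
  return provider ? provider->PlayoutDelayMs() : 0;
}
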
@@ -16,7 +16,14 @@
#include <SLES/OpenSLES_AndroidConfiguration.h>

#include "webrtc/base/scoped_ptr.h"
// Not defined in the android version we use to build with
#define SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION ((SLuint32) 0x00000004)

#if !defined(WEBRTC_GONK)
#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
#else
#include "media/AudioEffect.h"
#endif
#include "webrtc/modules/audio_device/android/low_latency_event.h"
#include "webrtc/modules/audio_device/include/audio_device.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"

@@ -105,7 +112,7 @@ class OpenSlesInput {

  // Stereo support
  int32_t StereoRecordingIsAvailable(bool& available);  // NOLINT
  int32_t SetStereoRecording(bool enable) { return -1; }
  int32_t SetStereoRecording(bool enable);
  int32_t StereoRecording(bool& enabled) const;  // NOLINT

  // Delay information and control

@@ -129,7 +136,7 @@ class OpenSlesInput {
    // Keep as few OpenSL buffers as possible to avoid wasting memory. 2 is
    // minimum for playout. Keep 2 for recording as well.
    kNumOpenSlBuffers = 2,
    kNum10MsToBuffer = 3,
    kNum10MsToBuffer = 8,
  };

  int InitSampleRate();

@@ -145,6 +152,11 @@ class OpenSlesInput {
  // etc, so it should be called when starting recording.
  bool CreateAudioRecorder();
  void DestroyAudioRecorder();
  void SetupVoiceMode();
#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
  void SetupAECAndNS();
  bool CheckPlatformAEC();
#endif

  // When overrun happens there will be more frames received from OpenSL than
  // the desired number of buffers. It is possible to expand the number of

@@ -177,8 +189,10 @@ class OpenSlesInput {

  PlayoutDelayProvider* delay_provider_;

#if !defined(WEBRTC_GONK)
  // Java API handle
  AudioManagerJni audio_manager_;
#endif

  // TODO(henrika): improve this area
  // PlayoutDelayProvider* delay_provider_;

@@ -223,8 +237,27 @@ class OpenSlesInput {
  uint32_t rec_sampling_rate_;
  bool agc_enabled_;

#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
  android::AudioEffect* aec_;
  android::AudioEffect* ns_;
#endif
  // Audio status
  uint16_t recording_delay_;

  // dlopen for OpenSLES
  void *opensles_lib_;
  typedef SLresult (*slCreateEngine_t)(SLObjectItf *,
                                       SLuint32,
                                       const SLEngineOption *,
                                       SLuint32,
                                       const SLInterfaceID *,
                                       const SLboolean *);
  slCreateEngine_t f_slCreateEngine;
  SLInterfaceID SL_IID_ENGINE_;
  SLInterfaceID SL_IID_BUFFERQUEUE_;
  SLInterfaceID SL_IID_ANDROIDCONFIGURATION_;
  SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_;
  SLInterfaceID SL_IID_RECORD_;
};

}  // namespace webrtc

@@ -8,10 +8,14 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT

#include "webrtc/modules/audio_device/android/opensles_output.h"

#include <assert.h>
#include <dlfcn.h>

#include "OpenSLESProvider.h"
#include "webrtc/modules/audio_device/android/opensles_common.h"
#include "webrtc/modules/audio_device/android/fine_audio_buffer.h"
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"

@@ -60,7 +64,8 @@ OpenSlesOutput::OpenSlesOutput(AudioManager* audio_manager)
      speaker_sampling_rate_(kDefaultSampleRate),
      buffer_size_samples_(0),
      buffer_size_bytes_(0),
      playout_delay_(0) {
      playout_delay_(0),
      opensles_lib_(NULL) {
}

OpenSlesOutput::~OpenSlesOutput() {

@@ -79,15 +84,51 @@ void OpenSlesOutput::ClearAndroidAudioDeviceObjects() {
int32_t OpenSlesOutput::Init() {
  assert(!initialized_);

  /* Try to dynamically open the OpenSLES library */
  opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY);
  if (!opensles_lib_) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 " failed to dlopen OpenSLES library");
    return -1;
  }

  f_slCreateEngine = (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine");
  SL_IID_ENGINE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE");
  SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE");
  SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION");
  SL_IID_PLAY_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_PLAY");
  SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
  SL_IID_VOLUME_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_VOLUME");

  if (!f_slCreateEngine ||
      !SL_IID_ENGINE_ ||
      !SL_IID_BUFFERQUEUE_ ||
      !SL_IID_ANDROIDCONFIGURATION_ ||
      !SL_IID_PLAY_ ||
      !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ ||
      !SL_IID_VOLUME_) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 " failed to find OpenSLES function");
    return -1;
  }

  // Set up OpenSl engine.
  OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
                                          NULL, NULL),
#ifndef MOZILLA_INTERNAL_API
  OPENSL_RETURN_ON_FAILURE(f_slCreateEngine(&sles_engine_, 1, kOption, 0,
                                            NULL, NULL),
                           -1);
#else
  OPENSL_RETURN_ON_FAILURE(mozilla_get_sles_engine(&sles_engine_, 1, kOption), -1);
#endif
#ifndef MOZILLA_INTERNAL_API
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
                                                    SL_BOOLEAN_FALSE),
                           -1);
#else
  OPENSL_RETURN_ON_FAILURE(mozilla_realize_sles_engine(sles_engine_), -1);
#endif
  OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
                                                         SL_IID_ENGINE,
                                                         SL_IID_ENGINE_,
                                                         &sles_engine_itf_),
                           -1);
  // Set up OpenSl output mix.

@@ -115,10 +156,15 @@ int32_t OpenSlesOutput::Terminate() {
  // It is assumed that the caller has stopped playout before terminating.
  assert(!playing_);
  (*sles_output_mixer_)->Destroy(sles_output_mixer_);
#ifndef MOZILLA_INTERNAL_API
  (*sles_engine_)->Destroy(sles_engine_);
#else
  mozilla_destroy_sles_engine(&sles_engine_);
#endif
  initialized_ = false;
  speaker_initialized_ = false;
  play_initialized_ = false;
  dlclose(opensles_lib_);
  return 0;
}

@@ -302,6 +348,7 @@ void OpenSlesOutput::UpdatePlayoutDelay() {
}

bool OpenSlesOutput::SetLowLatency() {
#if !defined(WEBRTC_GONK)
  if (!audio_manager_.low_latency_supported()) {
    return false;
  }

@@ -310,6 +357,9 @@ bool OpenSlesOutput::SetLowLatency() {
  speaker_sampling_rate_ = audio_manager_.native_output_sample_rate();
  assert(speaker_sampling_rate_ > 0);
  return true;
#else
  return false;
#endif
}

void OpenSlesOutput::CalculateNumFifoBuffersNeeded() {

@@ -395,7 +445,7 @@ bool OpenSlesOutput::CreateAudioPlayer() {
  // Note the interfaces still need to be initialized. This only tells OpenSl
  // that the interfaces will be needed at some point.
  SLInterfaceID ids[kNumInterfaces] = {
    SL_IID_BUFFERQUEUE, SL_IID_VOLUME, SL_IID_ANDROIDCONFIGURATION };
    SL_IID_BUFFERQUEUE_, SL_IID_VOLUME_, SL_IID_ANDROIDCONFIGURATION_ };
  SLboolean req[kNumInterfaces] = {
    SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
  OPENSL_RETURN_ON_FAILURE(

@@ -407,7 +457,7 @@ bool OpenSlesOutput::CreateAudioPlayer() {
  SLAndroidConfigurationItf player_config;
  OPENSL_RETURN_ON_FAILURE(
      (*sles_player_)->GetInterface(sles_player_,
                                    SL_IID_ANDROIDCONFIGURATION,
                                    SL_IID_ANDROIDCONFIGURATION_,
                                    &player_config),
      false);

@@ -426,11 +476,11 @@ bool OpenSlesOutput::CreateAudioPlayer() {
                                                    SL_BOOLEAN_FALSE),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_player_)->GetInterface(sles_player_, SL_IID_PLAY,
      (*sles_player_)->GetInterface(sles_player_, SL_IID_PLAY_,
                                    &sles_player_itf_),
      false);
  OPENSL_RETURN_ON_FAILURE(
      (*sles_player_)->GetInterface(sles_player_, SL_IID_BUFFERQUEUE,
      (*sles_player_)->GetInterface(sles_player_, SL_IID_BUFFERQUEUE_,
                                    &sles_player_sbq_itf_),
      false);
  return true;

@@ -574,3 +624,5 @@ bool OpenSlesOutput::CbThreadImpl() {
}

}  // namespace webrtc

#endif

@@ -16,8 +16,10 @@
#include <SLES/OpenSLES_AndroidConfiguration.h>

#include "webrtc/base/scoped_ptr.h"
#if !defined(WEBRTC_GONK)
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
#endif
#include "webrtc/modules/audio_device/android/low_latency_event.h"
#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"

@@ -31,6 +33,9 @@ class FineAudioBuffer;
class SingleRwFifo;
class ThreadWrapper;

#if defined(WEBRTC_ANDROID_OPENSLES_OUTPUT)
// allow us to replace it with a dummy

// OpenSL implementation that facilitates playing PCM data to an android device.
// This class is Thread-compatible. I.e. Given an instance of this class, calls
// to non-const methods require exclusive access to the object.

@@ -189,8 +194,10 @@ class OpenSlesOutput : public PlayoutDelayProvider {
  // Thread-compatible.
  bool CbThreadImpl();

#if !defined(WEBRTC_GONK)
  // Java API handle
  AudioManagerJni audio_manager_;
#endif

  bool initialized_;
  bool speaker_initialized_;

@@ -236,8 +243,217 @@ class OpenSlesOutput : public PlayoutDelayProvider {

  // Audio status
  uint16_t playout_delay_;

  // dlopen for OpenSLES
  void *opensles_lib_;
  typedef SLresult (*slCreateEngine_t)(SLObjectItf *,
                                       SLuint32,
                                       const SLEngineOption *,
                                       SLuint32,
                                       const SLInterfaceID *,
                                       const SLboolean *);
  slCreateEngine_t f_slCreateEngine;
  SLInterfaceID SL_IID_ENGINE_;
  SLInterfaceID SL_IID_BUFFERQUEUE_;
  SLInterfaceID SL_IID_ANDROIDCONFIGURATION_;
  SLInterfaceID SL_IID_PLAY_;
  SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_;
  SLInterfaceID SL_IID_VOLUME_;
};

#else

// Dummy OpenSlesOutput
class OpenSlesOutput : public PlayoutDelayProvider {
 public:
  explicit OpenSlesOutput(AudioManager* audio_manager) :
    initialized_(false), speaker_initialized_(false),
    play_initialized_(false), playing_(false)
  {}
  virtual ~OpenSlesOutput() {}

  static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
                                              void* context) { return 0; }
  static void ClearAndroidAudioDeviceObjects() {}

  // Main initialization and termination
  int32_t Init() { initialized_ = true; return 0; }
  int32_t Terminate() { initialized_ = false; return 0; }
  bool Initialized() const { return initialized_; }

  // Device enumeration
  int16_t PlayoutDevices() { return 1; }

  int32_t PlayoutDeviceName(uint16_t index,
                            char name[kAdmMaxDeviceNameSize],
                            char guid[kAdmMaxGuidSize])
  {
    assert(index == 0);
    // Empty strings.
    name[0] = '\0';
    guid[0] = '\0';
    return 0;
  }

  // Device selection
  int32_t SetPlayoutDevice(uint16_t index)
  {
    assert(index == 0);
    return 0;
  }
  int32_t SetPlayoutDevice(
      AudioDeviceModule::WindowsDeviceType device) { return 0; }

  // No-op
  int32_t SetPlayoutSampleRate(uint32_t sample_rate_hz) { return 0; }

  // Audio transport initialization
  int32_t PlayoutIsAvailable(bool& available)  // NOLINT
  {
    available = true;
    return 0;
  }
  int32_t InitPlayout()
  {
    assert(initialized_);
    play_initialized_ = true;
    return 0;
  }
  bool PlayoutIsInitialized() const { return play_initialized_; }

  // Audio transport control
  int32_t StartPlayout()
  {
    assert(play_initialized_);
    assert(!playing_);
    playing_ = true;
    return 0;
  }

  int32_t StopPlayout()
  {
    playing_ = false;
    return 0;
  }

  bool Playing() const { return playing_; }

  // Audio mixer initialization
  int32_t SpeakerIsAvailable(bool& available)  // NOLINT
  {
    available = true;
    return 0;
  }
  int32_t InitSpeaker()
  {
    assert(!playing_);
    speaker_initialized_ = true;
    return 0;
  }
  bool SpeakerIsInitialized() const { return speaker_initialized_; }

  // Speaker volume controls
  int32_t SpeakerVolumeIsAvailable(bool& available)  // NOLINT
  {
    available = true;
    return 0;
  }
  int32_t SetSpeakerVolume(uint32_t volume)
  {
    assert(speaker_initialized_);
    assert(initialized_);
    return 0;
  }
  int32_t SpeakerVolume(uint32_t& volume) const { return 0; }  // NOLINT
  int32_t MaxSpeakerVolume(uint32_t& maxVolume) const  // NOLINT
  {
    assert(speaker_initialized_);
    assert(initialized_);
    maxVolume = 0;
    return 0;
  }
  int32_t MinSpeakerVolume(uint32_t& minVolume) const  // NOLINT
  {
    assert(speaker_initialized_);
    assert(initialized_);
    minVolume = 0;
    return 0;
  }
  int32_t SpeakerVolumeStepSize(uint16_t& stepSize) const  // NOLINT
  {
    assert(speaker_initialized_);
    assert(initialized_);
    stepSize = 0;
    return 0;
  }

  // Speaker mute control
  int32_t SpeakerMuteIsAvailable(bool& available)  // NOLINT
  {
    available = true;
    return 0;
  }
  int32_t SetSpeakerMute(bool enable) { return -1; }
  int32_t SpeakerMute(bool& enabled) const { return -1; }  // NOLINT

  // Stereo support
  int32_t StereoPlayoutIsAvailable(bool& available)  // NOLINT
  {
    available = true;
    return 0;
  }
  int32_t SetStereoPlayout(bool enable)
  {
    return 0;
  }
  int32_t StereoPlayout(bool& enabled) const  // NOLINT
  {
    enabled = kNumChannels == 2;
    return 0;
  }

  // Delay information and control
  int32_t SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
                           uint16_t sizeMS) { return -1; }
  int32_t PlayoutBuffer(AudioDeviceModule::BufferType& type,  // NOLINT
                        uint16_t& sizeMS) const
  {
    type = AudioDeviceModule::kAdaptiveBufferSize;
    sizeMS = 40;
    return 0;
  }
  int32_t PlayoutDelay(uint16_t& delayMS) const  // NOLINT
  {
    delayMS = 0;
    return 0;
  }

  // Error and warning information
  bool PlayoutWarning() const { return false; }
  bool PlayoutError() const { return false; }
  void ClearPlayoutWarning() {}
  void ClearPlayoutError() {}

  // Attach audio buffer
  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {}

  // Speaker audio routing
  int32_t SetLoudspeakerStatus(bool enable) { return 0; }
  int32_t GetLoudspeakerStatus(bool& enable) const { enable = true; return 0; }  // NOLINT

 protected:
  virtual int PlayoutDelayMs() { return 40; }

 private:
  bool initialized_;
  bool speaker_initialized_;
  bool play_initialized_;
  bool playing_;
};
#endif

}  // namespace webrtc

#endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_OUTPUT_H_

@@ -8,6 +8,10 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#if defined(_MSC_VER)
#include <windows.h>
#endif

#include "webrtc/modules/audio_device/android/single_rw_fifo.h"

#include <assert.h>

@@ -20,7 +24,19 @@ namespace webrtc {

namespace subtle {

#if defined(__aarch64__)
// Start with compiler support, then processor-specific hacks
#if defined(__GNUC__) || defined(__clang__)
// Available on GCC and clang - others?
inline void MemoryBarrier() {
  __sync_synchronize();
}

#elif defined(_MSC_VER)
inline void MemoryBarrier() {
  ::MemoryBarrier();
}

#elif defined(__aarch64__)
// From http://src.chromium.org/viewvc/chrome/trunk/src/base/atomicops_internals_arm64_gcc.h
inline void MemoryBarrier() {
  __asm__ __volatile__ ("dmb ish" ::: "memory");
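The dispatch above prefers a compiler-provided full barrier and falls back to raw assembly only when neither the GCC/clang builtin nor the MSVC intrinsic is available. A hedged sketch of the same idea in portable C++11, which modern code could use instead of per-toolchain probing:

#include <atomic>

// Sketch: one full (sequentially consistent) fence, portable across
// toolchains, equivalent in intent to the per-compiler barriers above.
inline void FullMemoryBarrier() {
  std::atomic_thread_fence(std::memory_order_seq_cst);
}
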
@ -51,11 +51,26 @@
|
|||
'dummy/file_audio_device.h',
|
||||
],
|
||||
'conditions': [
|
||||
['OS=="linux"', {
|
||||
['build_with_mozilla==1', {
|
||||
'cflags_mozilla': [
|
||||
'$(NSPR_CFLAGS)',
|
||||
],
|
||||
}],
|
||||
['hardware_aec_ns==1', {
|
||||
'defines': [
|
||||
'WEBRTC_HARDWARE_AEC_NS',
|
||||
],
|
||||
}],
|
||||
['include_sndio_audio==1', {
|
||||
'include_dirs': [
|
||||
'sndio',
|
||||
],
|
||||
}], # include_sndio_audio==1
|
||||
['OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1', {
|
||||
'include_dirs': [
|
||||
'linux',
|
||||
],
|
||||
}], # OS==linux
|
||||
}], # OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1
|
||||
['OS=="ios"', {
|
||||
'include_dirs': [
|
||||
'ios',
|
||||
|
@ -73,9 +88,27 @@
|
|||
}],
|
||||
['OS=="android"', {
|
||||
'include_dirs': [
|
||||
'/widget/android',
|
||||
'android',
|
||||
],
|
||||
}], # OS==android
|
||||
['moz_widget_toolkit_gonk==1', {
|
||||
'cflags_mozilla': [
|
||||
'-I$(ANDROID_SOURCE)/frameworks/wilhelm/include',
|
||||
'-I$(ANDROID_SOURCE)/frameworks/av/include',
|
||||
'-I$(ANDROID_SOURCE)/system/media/wilhelm/include',
|
||||
'-I$(ANDROID_SOURCE)/system/media/audio_effects/include',
|
||||
'-I$(ANDROID_SOURCE)/frameworks/native/include',
|
||||
],
|
||||
'include_dirs': [
|
||||
'android',
|
||||
],
|
||||
}], # moz_widget_toolkit_gonk==1
|
||||
['enable_android_opensl==1', {
|
||||
'include_dirs': [
|
||||
'opensl',
|
||||
],
|
||||
}], # enable_android_opensl
|
||||
['include_internal_audio_device==0', {
|
||||
'defines': [
|
||||
'WEBRTC_DUMMY_AUDIO_BUILD',
|
||||
|
@ -90,14 +123,8 @@
|
|||
}],
|
||||
['include_internal_audio_device==1', {
|
||||
'sources': [
|
||||
'linux/alsasymboltable_linux.cc',
|
||||
'linux/alsasymboltable_linux.h',
|
||||
'linux/audio_device_alsa_linux.cc',
|
||||
'linux/audio_device_alsa_linux.h',
|
||||
'linux/audio_device_utility_linux.cc',
|
||||
'linux/audio_device_utility_linux.h',
|
||||
'linux/audio_mixer_manager_alsa_linux.cc',
|
||||
'linux/audio_mixer_manager_alsa_linux.h',
|
||||
'linux/latebindingsymboltable_linux.cc',
|
||||
'linux/latebindingsymboltable_linux.h',
|
||||
'ios/audio_device_ios.mm',
|
||||
|
@ -121,9 +148,10 @@
|
|||
'win/audio_device_utility_win.h',
|
||||
'win/audio_mixer_manager_win.cc',
|
||||
'win/audio_mixer_manager_win.h',
|
||||
# used externally for getUserMedia
|
||||
'opensl/single_rw_fifo.cc',
|
||||
            'opensl/single_rw_fifo.h',
            'android/audio_device_template.h',
            'android/audio_device_utility_android.cc',
            'android/audio_device_utility_android.h',
            'android/audio_manager.cc',
            'android/audio_manager.h',
            'android/audio_manager_jni.cc',

@ -132,51 +160,103 @@
            'android/audio_record_jni.h',
            'android/audio_track_jni.cc',
            'android/audio_track_jni.h',
            'android/fine_audio_buffer.cc',
            'android/fine_audio_buffer.h',
            'android/low_latency_event_posix.cc',
            'android/low_latency_event.h',
            'android/opensles_common.cc',
            'android/opensles_common.h',
            'android/opensles_input.cc',
            'android/opensles_input.h',
            'android/opensles_output.cc',
            'android/opensles_output.h',
            'android/single_rw_fifo.cc',
            'android/single_rw_fifo.h',
          ],
          'conditions': [
            ['OS=="android"', {
            ['moz_widget_toolkit_gonk==1', {
              'sources': [
                # references to android/audio_manager to avoid platform-specific limits
                'gonk/audio_manager.cc',
                'gonk/audio_manager.h',
              ],
            }],
            ['OS=="android" or moz_widget_toolkit_gonk==1', {
              'link_settings': {
                'libraries': [
                  '-llog',
                  '-lOpenSLES',
                ],
              },
              'conditions': [
                ['enable_android_opensl==1', {
                  'sources': [
                    'opensl/fine_audio_buffer.cc',
                    'opensl/fine_audio_buffer.h',
                    'opensl/low_latency_event_posix.cc',
                    'opensl/low_latency_event.h',
                    'opensl/opensles_common.cc',
                    'opensl/opensles_common.h',
                    'opensl/opensles_input.cc',
                    'opensl/opensles_input.h',
                    'opensl/opensles_output.h',
                    'shared/audio_device_utility_shared.cc',
                    'shared/audio_device_utility_shared.h',
                  ],
                }, {
                  'sources': [
                    'shared/audio_device_utility_shared.cc',
                    'shared/audio_device_utility_shared.h',
                  ],
                }],
                ['enable_android_opensl_output==1', {
                  'sources': [
                    'opensl/opensles_output.cc'
                  ],
                  'defines': [
                    'WEBRTC_ANDROID_OPENSLES_OUTPUT',
                  ],
                }],
              ],
            }],
            ['OS=="linux"', {
              'defines': [
                'LINUX_ALSA',
              ],
              'link_settings': {
                'libraries': [
                  '-ldl', '-lX11',
                ],
              },
              'conditions': [
                ['include_pulse_audio==1', {
                  'defines': [
                    'LINUX_PULSE',
                  ],
                  'sources': [
                    'linux/audio_device_pulse_linux.cc',
                    'linux/audio_device_pulse_linux.h',
                    'linux/audio_mixer_manager_pulse_linux.cc',
                    'linux/audio_mixer_manager_pulse_linux.h',
                    'linux/pulseaudiosymboltable_linux.cc',
                    'linux/pulseaudiosymboltable_linux.h',
                  ],
                }],
            }],
            ['include_sndio_audio==1', {
              'link_settings': {
                'libraries': [
                  '-lsndio',
                ],
              },
              'sources': [
                'sndio/audio_device_sndio.cc',
                'sndio/audio_device_sndio.h',
                'sndio/audio_device_utility_sndio.cc',
                'sndio/audio_device_utility_sndio.h',
              ],
            }],
            ['include_alsa_audio==1', {
              'cflags_mozilla': [
                '$(MOZ_ALSA_CFLAGS)',
              ],
              'defines': [
                'LINUX_ALSA',
              ],
              'sources': [
                'linux/alsasymboltable_linux.cc',
                'linux/alsasymboltable_linux.h',
                'linux/audio_device_alsa_linux.cc',
                'linux/audio_device_alsa_linux.h',
                'linux/audio_mixer_manager_alsa_linux.cc',
                'linux/audio_mixer_manager_alsa_linux.h',
              ],
            }],
            ['include_pulse_audio==1', {
              'cflags_mozilla': [
                '$(MOZ_PULSEAUDIO_CFLAGS)',
              ],
              'defines': [
                'LINUX_PULSE',
              ],
              'sources': [
                'linux/audio_device_pulse_linux.cc',
                'linux/audio_device_pulse_linux.h',
                'linux/audio_mixer_manager_pulse_linux.cc',
                'linux/audio_mixer_manager_pulse_linux.h',
                'linux/pulseaudiosymboltable_linux.cc',
                'linux/pulseaudiosymboltable_linux.h',
              ],
            }],
            ['OS=="mac"', {

@ -286,6 +366,8 @@
        '<(webrtc_root)/test/test.gyp:test_support_main',
      ],
      'sources': [
        'android/audio_manager.cc',
        'android/audio_manager.h',
        'android/fine_audio_buffer_unittest.cc',
        'android/low_latency_event_unittest.cc',
        'android/single_rw_fifo_unittest.cc',

@ -298,4 +380,3 @@
    }],  # include_tests
  ],
}
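A note on the WEBRTC_ANDROID_OPENSLES_OUTPUT define added above: it is only set when enable_android_opensl_output==1, the same condition that compiles opensl/opensles_output.cc, so the C++ side can key the OpenSL ES output path off the macro. A minimal sketch of that gating (the fallback label is an assumption, not taken from the tree):

    #include <cstdio>

    // Chosen at gyp time: enable_android_opensl_output==1 adds this define
    // along with the OpenSL ES output sources.
    #if defined(WEBRTC_ANDROID_OPENSLES_OUTPUT)
    #define AUDIO_OUTPUT_BACKEND "OpenSL ES"
    #else
    #define AUDIO_OUTPUT_BACKEND "JNI AudioTrack (assumed fallback)"
    #endif

    int main() {
      std::printf("output backend: %s\n", AUDIO_OUTPUT_BACKEND);
      return 0;
    }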
@ -16,21 +16,31 @@
#include <assert.h>
#include <string.h>

#if defined(_WIN32)
#if defined(WEBRTC_DUMMY_AUDIO_BUILD)
// do not include platform-specific headers
#elif defined(_WIN32)
#include "audio_device_utility_win.h"
#include "audio_device_wave_win.h"
#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
#include "audio_device_core_win.h"
#endif
#elif defined(WEBRTC_ANDROID)
#elif defined(WEBRTC_ANDROID_OPENSLES)
// ANDROID and GONK
#include <stdlib.h>
#include <dlfcn.h>
#include "audio_device_utility_android.h"
#include "webrtc/modules/audio_device/android/audio_device_template.h"
#if !defined(WEBRTC_GONK)
// GONK only supports OpenSL ES; Android can use either that or JNI
#include "webrtc/modules/audio_device/android/audio_record_jni.h"
#include "webrtc/modules/audio_device/android/audio_track_jni.h"
#endif
#include "webrtc/modules/audio_device/android/opensles_input.h"
#include "webrtc/modules/audio_device/android/opensles_output.h"
#elif defined(WEBRTC_LINUX)
#elif defined(WEBRTC_AUDIO_SNDIO)
#include "audio_device_utility_sndio.h"
#include "audio_device_sndio.h"
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
#include "audio_device_utility_linux.h"
#if defined(LINUX_ALSA)
#include "audio_device_alsa_linux.h"

@ -164,7 +174,10 @@ int32_t AudioDeviceModuleImpl::CheckPlatform()
#elif defined(WEBRTC_ANDROID)
    platform = kPlatformAndroid;
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is ANDROID");
#elif defined(WEBRTC_LINUX)
#elif defined(WEBRTC_AUDIO_SNDIO)
    platform = kPlatformSndio;
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is POSIX using SNDIO");
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
    platform = kPlatformLinux;
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is LINUX");
#elif defined(WEBRTC_IOS)

@ -271,33 +284,57 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()

    // Create the *Android OpenSLES* implementation of the Audio Device
    //
#if defined(WEBRTC_ANDROID)
#ifdef WEBRTC_ANDROID_OPENSLES
    // Force default audio layer to OpenSL ES if the special compiler flag
    // (enable_android_opensl) has been set to one.
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK)
    if (audioLayer == kPlatformDefaultAudio) {
        audioLayer = kAndroidOpenSLESAudio;
    }
#endif
    if (audioLayer == kPlatformDefaultAudio ||
        audioLayer == kAndroidJavaAudio) {
        ptrAudioDevice =
            new AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>(Id());
    } else if (audioLayer == kAndroidOpenSLESAudio) {
        // AudioRecordJni provides hardware AEC and OpenSlesOutput low latency.
        ptrAudioDevice =
            new AudioDeviceTemplate<OpenSlesInput, OpenSlesOutput>(Id());
    // AudioRecordJni provides hardware AEC and OpenSlesOutput low latency.
#if defined(WEBRTC_ANDROID_OPENSLES)
    // Android and Gonk
    // Check if the OpenSLES library is available before going further.
    void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
    if (opensles_lib) {
        // That worked, close for now and proceed normally.
        dlclose(opensles_lib);
        if (audioLayer == kPlatformDefaultAudio)
        {
            // Create *Android OpenSLES Audio* implementation
            ptrAudioDevice = new AudioDeviceTemplate<OpenSlesInput, OpenSlesOutput>(Id());
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                         "Android OpenSLES Audio APIs will be utilized");
        }
    }
#endif  // defined(WEBRTC_ANDROID_OPENSLES)
#if !defined(WEBRTC_GONK)
    // Fall back to this case on Android 2.2, or if OpenSL ES is not available.
    if (ptrAudioDevice == NULL) {
        // Create the *Android Java* implementation of the Audio Device
        if (audioLayer == kPlatformDefaultAudio)
        {
            // Create *Android JNI Audio* implementation
            ptrAudioDevice = new AudioDeviceTemplate<AudioRecordJni, AudioTrackJni>(Id());
            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
        }
    }
#endif  // !defined(WEBRTC_GONK)
    }

    if (ptrAudioDevice != NULL) {
        // Create the Android implementation of the Device Utility.
        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
    }
    // END #if defined(WEBRTC_ANDROID)
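The dlopen() probe above is worth calling out: rather than assuming OpenSL ES exists, the module loads libOpenSLES.so once with RTLD_LAZY, closes it again, and only then selects the OpenSL ES device, so builds without the library fall through to the JNI path. A self-contained sketch of the same capability probe (the library name is taken from the hunk; everything else is illustrative):

    #include <dlfcn.h>
    #include <cstdio>

    // True when the OpenSL ES runtime can be loaded. RTLD_LAZY defers symbol
    // resolution, so this only tests availability without binding anything.
    static bool HaveOpenSLES() {
      void* lib = dlopen("libOpenSLES.so", RTLD_LAZY);
      if (!lib)
        return false;
      dlclose(lib);  // probe only; the real user reopens it later
      return true;
    }

    int main() {
      std::printf("OpenSL ES %savailable\n", HaveOpenSLES() ? "" : "not ");
      return 0;
    }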

#elif defined(WEBRTC_AUDIO_SNDIO)
    ptrAudioDevice = new AudioDeviceSndio(Id());
    if (ptrAudioDevice != NULL)
    {
        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Sndio audio API...");
        _platformAudioLayer = kSndioAudio;
        // Create the sndio implementation of the Device Utility.
        ptrAudioDeviceUtility = new AudioDeviceUtilitySndio(Id());
    }

    // Create the *Linux* implementation of the Audio Device
    //
#elif defined(WEBRTC_LINUX)
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
    if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio))
    {
#if defined(LINUX_PULSE)

@ -345,7 +382,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
        //
        ptrAudioDeviceUtility = new AudioDeviceUtilityLinux(Id());
    }
#endif  // #if defined(WEBRTC_LINUX)
#endif  // #if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)

    // Create the *iPhone* implementation of the Audio Device
    //

@ -573,6 +610,10 @@ int32_t AudioDeviceModuleImpl::ActiveAudioLayer(AudioLayer* audioLayer) const
    {
        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kLinuxAlsaAudio");
    }
    else if (*audioLayer == AudioDeviceModule::kSndioAudio)
    {
        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kSndioAudio");
    }
    else
    {
        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: NOT_SUPPORTED");

@ -2023,6 +2064,10 @@ AudioDeviceModule::AudioLayer AudioDeviceModuleImpl::PlatformAudioLayer() const
        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                     "output: kLinuxAlsaAudio");
        break;
    case kSndioAudio:
        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                     "output: kSndioAudio");
        break;
    case kDummyAudio:
        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                     "output: kDummyAudio");

@ -32,7 +32,8 @@ public:
    kPlatformLinux = 3,
    kPlatformMac = 4,
    kPlatformAndroid = 5,
    kPlatformIOS = 6
    kPlatformIOS = 6,
    kPlatformSndio = 7
  };

  int32_t CheckPlatform();

@ -46,7 +46,7 @@ bool AudioDeviceUtility::StringCompare(

}  // namespace webrtc

#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)

// ============================================================================
// Linux & Mac

@ -109,4 +109,4 @@ bool AudioDeviceUtility::StringCompare(

}  // namespace webrtc

#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)

@ -0,0 +1 @@
#include "../android/audio_manager.cc"

@ -0,0 +1,6 @@
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_GONK_AUDIO_MANAGER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_GONK_AUDIO_MANAGER_H_

#include "../android/audio_manager.h"

#endif
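The two new gonk/ files above are deliberate one-line shims: each re-includes the shared android/ implementation so the moz_widget_toolkit_gonk build (see the gyp condition earlier) compiles the same code under its own path without forking it. Any further shim would take the same shape; gonk/audio_record.cc below is a hypothetical name used only to show the pattern:

    // gonk/audio_record.cc (hypothetical): reuse the Android implementation
    // verbatim; the gyp 'moz_widget_toolkit_gonk==1' block would list this file.
    #include "../android/audio_record.cc"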
@ -31,7 +31,8 @@ class AudioDeviceModule : public RefCountedModule {
    kLinuxPulseAudio = 4,
    kAndroidJavaAudio = 5,
    kAndroidOpenSLESAudio = 6,
    kDummyAudio = 7
    kSndioAudio = 7,
    kDummyAudio = 8
  };
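Because kSndioAudio is inserted at 7, kDummyAudio silently moves from 7 to 8, so any code that stored or transmitted the raw numeric value needs an audit. A hypothetical compile-time guard for this (not present in the tree; the include path is the usual one for this header):

    #include "webrtc/modules/audio_device/include/audio_device.h"

    // Hypothetical guard: fail the build if AudioLayer enumerators shift again.
    static_assert(static_cast<int>(webrtc::AudioDeviceModule::kSndioAudio) == 7 &&
                  static_cast<int>(webrtc::AudioDeviceModule::kDummyAudio) == 8,
                  "AudioLayer values renumbered; audit serialized uses");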

  enum WindowsDeviceType {

@ -18,8 +18,8 @@
#include "webrtc/system_wrappers/interface/thread_wrapper.h"

namespace webrtc {
const uint32_t N_REC_SAMPLES_PER_SEC = 44000;
const uint32_t N_PLAY_SAMPLES_PER_SEC = 44000;
const uint32_t N_REC_SAMPLES_PER_SEC = 44100;
const uint32_t N_PLAY_SAMPLES_PER_SEC = 44100;

const uint32_t N_REC_CHANNELS = 1;   // default is mono recording
const uint32_t N_PLAY_CHANNELS = 1;  // default is mono playout
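The 44000 to 44100 change corrects what looks like a plain typo: 44.1 kHz is the standard consumer audio rate, while 44000 Hz is not a rate devices advertise. For reference, the 10 ms buffer size this implies (10 ms slicing matches the _recordingFramesIn10MS bookkeeping later in this diff):

    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t kRecSamplesPerSec = 44100;                // corrected value
      const uint32_t kFramesPer10Ms = kRecSamplesPerSec / 100;
      std::printf("%u frames per 10 ms buffer\n", kFramesPer10Ms);  // prints 441
      return 0;
    }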
@ -18,6 +18,13 @@
#include "webrtc/system_wrappers/interface/sleep.h"
#include "webrtc/system_wrappers/interface/trace.h"

#include "Latency.h"

#define LOG_FIRST_CAPTURE(x) LogTime(AsyncLatencyLogger::AudioCaptureBase, \
                                     reinterpret_cast<uint64_t>(x), 0)
#define LOG_CAPTURE_FRAMES(x, frames) LogLatency(AsyncLatencyLogger::AudioCapture, \
                                                 reinterpret_cast<uint64_t>(x), frames)

webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable;

// Accesses ALSA functions through our late-binding symbol table instead of

@ -90,6 +97,7 @@ AudioDeviceLinuxALSA::AudioDeviceLinuxALSA(const int32_t id) :
    _playBufType(AudioDeviceModule::kFixedBufferSize),
    _initialized(false),
    _recording(false),
    _firstRecord(true),
    _playing(false),
    _recIsInitialized(false),
    _playIsInitialized(false),

@ -903,7 +911,8 @@ int32_t AudioDeviceLinuxALSA::RecordingDeviceName(
        memset(guid, 0, kAdmMaxGuidSize);
    }

    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize);
    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize,
                          guid, kAdmMaxGuidSize);
}

int16_t AudioDeviceLinuxALSA::RecordingDevices()
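With the two extra arguments, the ALSA backend now reports the device GUID (the raw ALSA device string) alongside the human-readable name instead of leaving guid zeroed. A hypothetical caller, assuming the usual kAdmMaxDeviceNameSize/kAdmMaxGuidSize buffer contract (128 bytes each in this tree) and an already-initialized module:

    #include <cstdio>
    #include "webrtc/modules/audio_device/include/audio_device.h"

    // Sketch only: 'adm' is assumed to be an initialized AudioDeviceModule.
    void PrintFirstRecordingDevice(webrtc::AudioDeviceModule* adm) {
      char name[webrtc::kAdmMaxDeviceNameSize] = {0};
      char guid[webrtc::kAdmMaxGuidSize] = {0};
      if (adm->RecordingDeviceName(0, name, guid) == 0)
        std::printf("capture device 0: %s (id: %s)\n", name, guid);
    }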
@ -1365,6 +1374,7 @@ int32_t AudioDeviceLinuxALSA::StartRecording()
    }
    // RECORDING
    const char* threadName = "webrtc_audio_module_capture_thread";
    _firstRecord = true;
    _ptrThreadRec = ThreadWrapper::CreateThread(
        RecThreadFunc, this, threadName);

@ -1521,6 +1531,16 @@ int32_t AudioDeviceLinuxALSA::StartPlayout()
    const char* threadName = "webrtc_audio_module_play_thread";
    _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
                                                 threadName);
    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     " playout snd_pcm_prepare failed (%s)\n",
                     LATE(snd_strerror)(errVal));
        // just log error
        // if snd_pcm_open fails will return -1
    }

    if (!_ptrThreadPlay->Start())
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,

@ -1533,16 +1553,6 @@ int32_t AudioDeviceLinuxALSA::StartPlayout()
    }
    _ptrThreadPlay->SetPriority(kRealtimePriority);

    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
    if (errVal < 0)
    {
        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                     " playout snd_pcm_prepare failed (%s)\n",
                     LATE(snd_strerror)(errVal));
        // just log error
        // if snd_pcm_open fails will return -1
    }

    return 0;
}
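These two StartPlayout() hunks are a reorder rather than new logic: snd_pcm_prepare() now runs before the playout thread is started, so the PCM device is in the PREPARED state before anything can write to it, instead of racing the freshly started thread. A self-contained sketch of the ordering principle, using plain ALSA and std::thread in place of the WebRTC wrappers:

    #include <alsa/asoundlib.h>
    #include <atomic>
    #include <thread>

    int main() {
      snd_pcm_t* pcm = nullptr;
      if (snd_pcm_open(&pcm, "default", SND_PCM_STREAM_PLAYBACK, 0) < 0)
        return 1;
      // Minimal configuration; snd_pcm_set_params leaves the device PREPARED.
      if (snd_pcm_set_params(pcm, SND_PCM_FORMAT_S16_LE,
                             SND_PCM_ACCESS_RW_INTERLEAVED,
                             1 /*mono*/, 44100, 1 /*allow resample*/,
                             500000 /*0.5 s latency*/) < 0)
        return 1;
      // Prepare *before* the writer thread exists, mirroring the fixed ordering.
      if (snd_pcm_prepare(pcm) < 0)
        return 1;

      std::atomic<bool> run{true};
      std::thread play([&] {
        short silence[441] = {0};  // one 10 ms mono buffer at 44100 Hz
        while (run.load())
          snd_pcm_writei(pcm, silence, 441);
      });
      run.store(false);  // stop at once; the sketch only demonstrates ordering
      play.join();
      snd_pcm_close(pcm);
      return 0;
    }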
@ -1717,7 +1727,9 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
    const bool playback,
    const int32_t enumDeviceNo,
    char* enumDeviceName,
    const int32_t ednLen) const
    const int32_t ednLen,
    char* enumDeviceId,
    const int32_t ediLen) const
{

    // Device enumeration based on libjingle implementation

@ -1756,6 +1768,8 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
        function == FUNC_GET_DEVICE_NAME_FOR_AN_ENUM) && enumDeviceNo == 0)
    {
        strcpy(enumDeviceName, "default");
        if (enumDeviceId)
            memset(enumDeviceId, 0, ediLen);

        err = LATE(snd_device_name_free_hint)(hints);
        if (err != 0)

@ -1818,6 +1832,11 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
        // We have found the enum device, copy the name to buffer.
        strncpy(enumDeviceName, desc, ednLen);
        enumDeviceName[ednLen-1] = '\0';
        if (enumDeviceId)
        {
            strncpy(enumDeviceId, name, ediLen);
            enumDeviceId[ediLen-1] = '\0';
        }
        keepSearching = false;
        // Replace '\n' with '-'.
        char* pret = strchr(enumDeviceName, '\n' /*0xa*/);  // LF

@ -1830,6 +1849,11 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
        // We have found the enum device, copy the name to buffer.
        strncpy(enumDeviceName, name, ednLen);
        enumDeviceName[ednLen-1] = '\0';
        if (enumDeviceId)
        {
            strncpy(enumDeviceId, name, ediLen);
            enumDeviceId[ediLen-1] = '\0';
        }
        keepSearching = false;
    }
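Note the explicit enumDeviceId[ediLen-1] = '\0' after each strncpy(): strncpy does not null-terminate when the source fills the destination, so the code forces termination on every copy. The same pattern as a standalone helper (a sketch, not code from the tree):

    #include <cstddef>
    #include <cstring>

    // Copy with truncation, always leaving dst null-terminated.
    static void CopyTruncate(char* dst, const char* src, std::size_t dstLen) {
      if (dst == nullptr || dstLen == 0)
        return;
      std::strncpy(dst, src, dstLen);
      dst[dstLen - 1] = '\0';  // strncpy writes no terminator on truncation
    }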

@ -1854,7 +1878,7 @@ int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
                         LATE(snd_strerror)(err));
            // Continue and return true anyway, since we did get the whole list.
            }
        }
    }

    if (FUNC_GET_NUM_OF_DEVICE == function)
    {

@ -2139,6 +2163,11 @@ bool AudioDeviceLinuxALSA::RecThreadProcess()
        {   // buf is full
            _recordingFramesLeft = _recordingFramesIn10MS;

            if (_firstRecord) {
                LOG_FIRST_CAPTURE(this);
                _firstRecord = false;
            }
            LOG_CAPTURE_FRAMES(this, _recordingFramesIn10MS);
            // store the recorded buffer (no action will be taken if the
            // #recorded samples is not a full buffer)
            _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer,

@ -161,7 +161,9 @@ private:
        const bool playback,
        const int32_t enumDeviceNo = 0,
        char* enumDeviceName = NULL,
        const int32_t ednLen = 0) const;
        const int32_t ednLen = 0,
        char* enumDeviceID = NULL,
        const int32_t ediLen = 0) const;
    int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle);
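In the header, the new parameters are appended with default values, so every pre-existing call site of the private helper keeps compiling unchanged; only RecordingDeviceName() passes the extra arguments. (The declaration spells it enumDeviceID while the definition uses enumDeviceId; parameter names need not match, though it reads oddly.) A minimal illustration of the idiom, as a hypothetical free function rather than the class member:

    #include <cstdint>
    #include <cstdio>

    // New trailing parameters carry defaults so old call forms still compile.
    static int32_t GetInfo(int32_t deviceNo,
                           char* name = nullptr, int32_t nameLen = 0,
                           char* id = nullptr, int32_t idLen = 0) {
      std::printf("device %d: name buf %d bytes, id buf %d bytes\n",
                  deviceNo, nameLen, idLen);
      (void)name;
      (void)id;
      return 0;
    }

    int main() {
      char name[128], id[128];
      GetInfo(0);                                      // old-style call
      GetInfo(0, name, sizeof(name), id, sizeof(id));  // new-style call
      return 0;
    }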

private:

@ -225,6 +227,7 @@ private:
private:
    bool _initialized;
    bool _recording;
    bool _firstRecord;
    bool _playing;
    bool _recIsInitialized;
    bool _playIsInitialized;